From 87eccb32d8c9ed386d5256759ec84e8e0951647b Mon Sep 17 00:00:00 2001 From: dairiki Date: Tue, 18 Sep 2001 19:16:24 +0000 Subject: [PATCH] Jeff's hacks II. This is a major change, to say the least. Some highlights: o Completely new database API. WARNING: all database schemas (currently MySQL, Postgres and DBA support is working) use completely revised schema, so you must start this new code with a new blank database... o WikiPlugins o New template engine. In addition, some more incremental changes: o Cascading Style Sheets reworked. o Expanded syntax for text search: e.g. "wiki OR wacky AND NOT page". o PhpWiki should now work with register_globals off. (Security issue.) o Edit preview button. (and probably more, which I'm forgetting about now.) Much of this code is still in a state of flux (particularly the new template engine code, and to a lesser extent the API for the plugins.) Feel free to play and hack on this, just be warned that some of it may still change quite a bit... See pgsrc/ReleaseNotes for a few more notes. And feel free to post questions or comments either publicly on , or privately, to . 
git-svn-id: svn://svn.code.sf.net/p/phpwiki/code/trunk@555 96ab9672-09ca-45d6-a79d-3d69d39ca109 --- DBLIB.txt | 159 +-- INSTALL | 27 +- INSTALL.flatfile | 5 +- INSTALL.mSQL | 5 +- INSTALL.mysql | 3 +- INSTALL.pgsql | 6 +- README | 4 +- UPGRADING | 11 +- index.php | 129 +- lib/ArchiveCleaner.php | 141 +++ lib/DbaDatabase.php | 154 +++ lib/DbaListSet.php | 142 +++ lib/DbaPartition.php | 85 ++ lib/ErrorManager.php | 357 ++++++ lib/PhpWikiDatabase.php | 50 - lib/Request.php | 310 +++++ lib/Template.php | 306 +++++ lib/TextSearchQuery.php | 610 ++++++++++ lib/WikiDB.php | 1151 ++++++++++++++++++ lib/WikiDB/SQL.php | 50 + lib/WikiDB/backend.php | 489 ++++++++ lib/WikiDB/backend/PearDB.php | 705 +++++++++++ lib/WikiDB/backend/PearDB_mysql.php | 46 + lib/WikiDB/backend/PearDB_pgsql.php | 76 ++ lib/WikiDB/backend/dba.php | 39 + lib/WikiDB/backend/dbaBase.php | 424 +++++++ lib/WikiDB/backend/dumb/AllRevisionsIter.php | 68 ++ lib/WikiDB/backend/dumb/BackLinkIter.php | 39 + lib/WikiDB/backend/dumb/MostPopularIter.php | 48 + lib/WikiDB/backend/dumb/MostRecentIter.php | 73 ++ lib/WikiDB/backend/dumb/TextSearchIter.php | 57 + lib/WikiDB/dba.php | 39 + lib/WikiPlugin.php | 279 +++++ lib/WikiUser.php | 229 ++++ lib/config.php | 60 +- lib/db_filesystem.php | 260 ---- lib/dbalib.php | 279 ----- lib/dbmlib.php | 509 -------- lib/diff.php | 171 ++- lib/display.php | 40 +- lib/editpage.php | 96 +- lib/fullsearch.php | 93 +- lib/interwiki.php | 17 +- lib/loadsave.php | 515 ++++---- lib/main.php | 389 +++--- lib/msql.php | 531 -------- lib/mysql.php | 447 ------- lib/pageinfo.php | 102 +- lib/pgsql.php | 475 -------- lib/prepend.php | 131 +- lib/savepage.php | 274 +++-- lib/search.php | 38 +- lib/stdlib.php | 571 ++++----- lib/transform.php | 100 +- lib/userauth.php | 198 --- lib/ziplib.php | 177 +-- pgsrc/BackLinks | 11 + pgsrc/DebugInfo | 1 + pgsrc/FindPage | 4 +- pgsrc/FullTextSearch | 15 + pgsrc/LikePages | 6 + pgsrc/MostPopular | 4 +- pgsrc/PhpWikiAdministration | 2 +- 
pgsrc/RecentChanges | 16 + pgsrc/ReleaseNotes | 10 + pgsrc/TitleSearch | 15 + pgsrc/WikiPlugin | 48 + phpwiki-heavy.css | 32 + phpwiki.css | 249 ++-- schemas/schema.mysql | 105 +- schemas/schema.psql | 189 ++- templates/README | 13 +- templates/browse.html | 169 ++- templates/editpage.html | 134 +- templates/message.html | 29 +- 75 files changed, 8196 insertions(+), 4645 deletions(-) create mode 100644 lib/ArchiveCleaner.php create mode 100644 lib/DbaDatabase.php create mode 100644 lib/DbaListSet.php create mode 100644 lib/DbaPartition.php create mode 100644 lib/ErrorManager.php delete mode 100644 lib/PhpWikiDatabase.php create mode 100644 lib/Request.php create mode 100644 lib/Template.php create mode 100644 lib/TextSearchQuery.php create mode 100644 lib/WikiDB.php create mode 100644 lib/WikiDB/SQL.php create mode 100644 lib/WikiDB/backend.php create mode 100644 lib/WikiDB/backend/PearDB.php create mode 100644 lib/WikiDB/backend/PearDB_mysql.php create mode 100644 lib/WikiDB/backend/PearDB_pgsql.php create mode 100644 lib/WikiDB/backend/dba.php create mode 100644 lib/WikiDB/backend/dbaBase.php create mode 100644 lib/WikiDB/backend/dumb/AllRevisionsIter.php create mode 100644 lib/WikiDB/backend/dumb/BackLinkIter.php create mode 100644 lib/WikiDB/backend/dumb/MostPopularIter.php create mode 100644 lib/WikiDB/backend/dumb/MostRecentIter.php create mode 100644 lib/WikiDB/backend/dumb/TextSearchIter.php create mode 100644 lib/WikiDB/dba.php create mode 100644 lib/WikiPlugin.php create mode 100644 lib/WikiUser.php delete mode 100644 lib/db_filesystem.php delete mode 100644 lib/dbalib.php delete mode 100644 lib/dbmlib.php delete mode 100644 lib/msql.php delete mode 100644 lib/mysql.php delete mode 100644 lib/pgsql.php delete mode 100644 lib/userauth.php create mode 100644 pgsrc/BackLinks create mode 100644 pgsrc/DebugInfo create mode 100644 pgsrc/FullTextSearch create mode 100644 pgsrc/LikePages create mode 100644 pgsrc/RecentChanges create mode 100644 pgsrc/TitleSearch 
create mode 100644 pgsrc/WikiPlugin create mode 100644 phpwiki-heavy.css diff --git a/DBLIB.txt b/DBLIB.txt index 91420cd4c..5cd1c3d43 100644 --- a/DBLIB.txt +++ b/DBLIB.txt @@ -1,159 +1,8 @@ -This is a description of the database interface for PhpWiki. Regardless -of what kind of data store is used (RDBMS, DBM files, flat text files) -you should be able to write a library that supports that data store. +This release uses a complete new database API. -A few notes: +For now, see lib/WikiDB.php for a description of the external API to the database. -* While most functions specify a "db reference" as the first value - passed in, this can be any kind of data type that your functions - know about. For example, in the DBM implementation this is a hash of - integers that refer to open database files, but in the MySQL - version it's an associative array that contains the DB information. +See lib/WikiDB/backend.php for more detail on how to write a new backend. -* Functions that return the page data must return a hash (associative - array) of all the data, where 'content' == the text of the page in Wiki - markup, 'version' is an integer representing the version, 'author' - the IP address or host name of the previous author and so on. See - the next paragraph for a precise description. - -* The data structure. This is commonly named $pagehash in the source - code; it's an associative array with values that are integers, - strings and arrays (i.e. a heterogenous data structure). 
Here's a - current description: - - $pagehash = { - author => string, - content => array (where each element is a line of the page), - created => integer (a number in Unix time since the Epoch), - flags => integer, - lastmodified => integer (also Unix time), - pagename => string, - version => integer - }; - -The functions are: - - OpenDataBase($dbname) - takes: a string, the name of the database - returns: a reference to the database (a handle) - - - CloseDataBase($dbi) - takes: a reference to the database (handle) - returns: the value of the close. For databases with persistent - connections, this doesn't return anything. - - - MakeDBHash($pagename, $pagehash) - takes: page name, page array - returns: an encoded version of the $pagehash suitable for - insertion into the data store. This is an internal helper - function used mainly for the RDBMSs. - - MakePageHash($dbhash) - takes: an array that came from the database - returns: the $pagehash data structure used by the - application. This function undoes what MakeDBHash does. - - RetrievePage($dbi, $pagename, $pagestore) - takes: db reference, string which is the name of a page, and a - string indicating which store to fetch the page from (live or archive). - returns: a PHP associative array containing the page data - (text, version, author, etc) - - - InsertPage($dbi, $pagename, $pagehash) - takes: db reference, page name (string), associative array - of all page data - returns: nothing (hmm. It should probably return true/false) - - SaveCopyToArchive($dbi, $pagename, $pagehash) - Similar to InsertPage but for handling the archive store. The - goal here was to separate the two (live db and archive db) in - case there were different storage formats (for example, the - archive might only store diffs of the pages). However this is - not the case in the implementations. 
- - IsWikiPage($dbi, $pagename) - takes: db reference, string containing page name - returns: true or false, if the page already exists in the live db. - - IsInArchive($dbi, $pagename) - takes: db reference, string containing page name - returns: true or false, if the page already exists in the archive. - - InitTitleSearch($dbi, $search) - takes: db reference, search string - returns: a handle to identify the query and the current position - within the result set. - - RemovePage($dbi, $pagename) - takes: db reference, name of the page - returns: nothing - This deletes a page from both the live and archive page stores. - - TitleSearchNextMatch($dbi, &$pos) - takes: db reference, reference to a hash created by - InitTitleSearch - returns: the next page name that contains a match to the search term - (advances $pos to next result field as well) - - MakeSQLSearchClause($search, $column) - takes: a search string, column name - returns: a SQL query string suitable for a database query - - InitFullSearch($dbi, $search) - takes: db reference, string containing search term - returns: similar to InitTitleSearch: a handle to identify the - query and the current position within the result set. - - - FullSearchNextMatch($dbi, &$pos) - takes: db reference, reference to a hash created by - InitFullSearch - returns: an associative array, where: - 'name' -- contains the page name - 'hash' -- contains the hash of the page data - (advances $pos to next result field as well) - - - IncreaseHitCount($dbi, $pagename) - takes: db reference, string (name of a page) - returns: nothing (MySQL implementation returns the last result - set but it is not used by the caller) - - - GetHitCount($dbi, $pagename) - takes: db reference, string (page name) - returns: an integer, the number of hits the page has received - - - InitMostPopular($dbi, $limit) - takes: a db reference and an integer, which is the limit of the - number of pages you want returned. 
- returns: the result set from the query - - - MostPopularNextMatch($dbi, $res) - takes: db reference, the result set returned by InitMostPopular - returns: the next row from the result set, as a PHP array type - - GetAllWikiPageNames($dbi) - takes: db reference - returns: an array containing all page names - - GetWikiPageLinks($dbi, $pagename) - takes: db reference, page name - returns: a two-dimensional array containing outbound links - ordered by score desc ('out'); inbound links ordered by score - desc ('in'); inbound or outbound links ordered by most number of - page views ('popular'). - - SetWikiPageLinks($dbi, $pagename, $linklist) - takes: db reference, page name, list of pages linking to this - one - This deletes the existing list of linking pages and inserts all - the page names in $linklist. - -$Id: DBLIB.txt,v 1.9 2001-02-17 05:35:56 dairiki Exp $ +$Id: DBLIB.txt,v 1.10 2001-09-18 19:16:23 dairiki Exp $ diff --git a/INSTALL b/INSTALL index c3e9606ec..710eb38a1 100644 --- a/INSTALL +++ b/INSTALL @@ -1,7 +1,6 @@ 0. INSTALLATION -PhpWiki requires PHP version 3.0.9 or greater, since it uses the -preg_*() family of functions. +PhpWiki requires PHP version 4.0.? or greater. Untar/gzip this file into the directory where you want it to live. That's it. @@ -9,20 +8,7 @@ That's it. bash$ gzip -d phpwiki-X.XX.tar.gz bash$ tar -xvf phpwiki-X.XX.tar -To improve efficiency, edit lib/config.php and set the $ServerAddress -by hand; this will save a regexp call on every invocation. - -Example: -Let's say you own the web server http://www.foo.com/. You untar in the -server's root directory; then you should be able to just go to your new -Wiki: - -http://www.foo.com/phpwiki/index.php - -If you configure your server to recognize index.php as the index of a -directory, you can just do: - -http://www.foo.com/phpwiki/ +Look at index.php and edit the settings there to your liking. 1. CONFIGURATION @@ -35,11 +21,11 @@ live site, archived pages, and some additional information. 
If you don't want the DBM files to live in /tmp you must make sure the web server can read/write to your chosen location. It's probably a bad idea -to leave it in /tmp. (Again, edit lib/config.php). +to leave it in /tmp. (Again, edit index.php). For example, you create a subdirectory called "pages" in the wiki directory made when you untarred PhpWiki. Move the DBM files there. -The files are called: wikipagesdb, wikiarchivedb, wikilinksdb, +(FIXME: this is incorrect:)The files are called: wikipagesdb, wikiarchivedb, wikilinksdb, wikihottopicsdb, and wikihitcountdb. The files should already have proper rights and owners, as they were created by the web server. Otherwise change them accordingly so your web server can read/write the DBM @@ -79,12 +65,13 @@ You should visit http://www.php.net/ if you don't have PHP. Note that you should have the web server configured to allow index.php as the root document of a directory. +FIXME: obsolete. (PHP 3 won't work) This web application was written under PHP version 3.0.12 and the latest build of PHP4. It's tested under the following systems: MySQL + Debian -mSQL + Red Hat 4.1 -DBM or Postgresql on Red Hat 6.2 +mSQL + Red Hat 4.1 (FIXME: msql currently won't work) +DBA or Postgresql on Red Hat 6.2 It reportedly works on Windows with Apache+PHP, which amazes me. diff --git a/INSTALL.flatfile b/INSTALL.flatfile index 8710c63bf..2e47638af 100644 --- a/INSTALL.flatfile +++ b/INSTALL.flatfile @@ -1,3 +1,6 @@ +FIXME: The flatfile backend has not yet been ported to the new database +scheme. For now, it is broken. + If you cannot run PhpWiki on top of a relational database like MySQL or Postgresql, and your system does not support DBM files or (worse) has a broken implementation like NDBM on Solaris, then @@ -129,4 +132,4 @@ phpwiki-talk list at phpwiki-talk@lists.sourceforge.net. 
Steve Wainstead swain@panix.com -$Id: INSTALL.flatfile,v 1.4 2001-04-06 18:21:36 wainstead Exp $ \ No newline at end of file +$Id: INSTALL.flatfile,v 1.5 2001-09-18 19:16:23 dairiki Exp $ diff --git a/INSTALL.mSQL b/INSTALL.mSQL index 5b7b69b13..b0b596adf 100644 --- a/INSTALL.mSQL +++ b/INSTALL.mSQL @@ -1,3 +1,6 @@ +FIXME: The flatfile backend has not yet been ported to the new database +scheme. For now, it is broken. + Note: mSQL will not be supported in the 1.3 development branch, unless someone wants to assume responsibility for it. When the new version of mSQL is released we might reconsider it, but there has been no demand @@ -56,4 +59,4 @@ README for more information, plus the comments in lib/config.php. --Steve Wainstead swain@panix.com -$Id: INSTALL.mSQL,v 1.4 2001-03-02 00:38:50 wainstead Exp $ \ No newline at end of file +$Id: INSTALL.mSQL,v 1.5 2001-09-18 19:16:23 dairiki Exp $ diff --git a/INSTALL.mysql b/INSTALL.mysql index 626f806ed..5a75458fb 100644 --- a/INSTALL.mysql +++ b/INSTALL.mysql @@ -1,3 +1,4 @@ +FIXME: these instructions are slightly broken. Installing phpwiki with mySQL ----------------------------- @@ -52,4 +53,4 @@ if (!extension_loaded("mysql")) { dl("mysql.so"); } /Arno ahollosi@mail.com -$Id: INSTALL.mysql,v 1.7 2001-07-15 15:49:44 wainstead Exp $ +$Id: INSTALL.mysql,v 1.8 2001-09-18 19:16:23 dairiki Exp $ diff --git a/INSTALL.pgsql b/INSTALL.pgsql index c830fee92..23f1f5056 100644 --- a/INSTALL.pgsql +++ b/INSTALL.pgsql @@ -1,3 +1,7 @@ +FIXME: The pgsql backend has not yet been ported to the new database +scheme. (Though it should not be very hard, and is certainly in +the works.) + ---------- NOTE for the 1.2 release: You may see a few warnings when you first load the pages. 
They may look like this: @@ -68,4 +72,4 @@ swain@panix.com Report bugs to phpwiki-talk@lists.sourceforge.net -$Id: INSTALL.pgsql,v 1.8 2001-03-03 19:43:14 wainstead Exp $ \ No newline at end of file +$Id: INSTALL.pgsql,v 1.9 2001-09-18 19:16:23 dairiki Exp $ diff --git a/README b/README index febcecdf3..f2d92af59 100644 --- a/README +++ b/README @@ -1,3 +1,5 @@ +FIXME: This is outdated. + This web application is licensed under the Gnu Public License, which should be included in the same directory as this README. A copy can be found at http://www.gnu.org/copyleft/gpl.txt. @@ -69,4 +71,4 @@ Steve Wainstead swain@panix.com http://wcsb.org/~swain/ -$Id: README,v 1.11 2001-03-03 19:43:14 wainstead Exp $ +$Id: README,v 1.12 2001-09-18 19:16:23 dairiki Exp $ diff --git a/UPGRADING b/UPGRADING index ccacc4bd8..f2a6a4337 100644 --- a/UPGRADING +++ b/UPGRADING @@ -1,3 +1,12 @@ +FIXME: WARNING WARNING: the schemas used by the new databases +are completely incompatible with schemas in any previous version +of PhpWiki. If you install this new PhpWiki, you must start with a +new empty database (currently either mysql or dba). (It will be +filled with the usual default pages.) + +FIXME: add more. + + More comprehensive updgrading information is forthcoming... however in the meantime, this message is in the Open Discussion board on Sourceforge (see URL below for the thread). In it Jeff describes how @@ -45,4 +54,4 @@ specified in WIKI_PGSRC. Jeff -$Id: UPGRADING,v 1.2 2001-04-06 18:21:36 wainstead Exp $ \ No newline at end of file +$Id: UPGRADING,v 1.3 2001-09-18 19:16:23 dairiki Exp $ diff --git a/index.php b/index.php index cfc33b969..b7c646e1f 100644 --- a/index.php +++ b/index.php @@ -21,9 +21,9 @@ ///////////////////////////////////////////////////////////////////// // Part Null: Don't touch this! 
-define ('PHPWIKI_VERSION', '1.3.0pre'); +define ('PHPWIKI_VERSION', '1.3.0-jeffs-hacks'); require "lib/prepend.php"; -rcs_id('$Id: index.php,v 1.20 2001-07-20 17:40:12 dairiki Exp $'); +rcs_id('$Id: index.php,v 1.21 2001-09-18 19:16:23 dairiki Exp $'); ///////////////////////////////////////////////////////////////////// // @@ -80,43 +80,107 @@ define('ALLOW_BOGO_LOGIN', true); // $DBParams = array( // Select the database type: - // Uncomment one of these, or leave all commented for the default - // data base type ('dba' if supported, else 'dbm'.) - //'dbtype' => 'dba', - //'dbtype' => 'dbm', - //'dbtype' => 'mysql', - //'dbtype' => 'pgsql', - //'dbtype' => 'msql', - //'dbtype' => 'file', + //'dbtype' => 'SQL', + 'dbtype' => 'dba', + + // For SQL based backends, specify the database as a DSN + // The most general form of a DSN looks like: + // + // phptype(dbsyntax)://username:password@protocol+hostspec/database + // + // For a MySQL database, the following should work: + // + // mysql://user:password@host/databasename + // + // FIXME: My version Pear::DB seems to be broken enough that there is + // no way to connect to a mysql server over a socket right now. + //'dsn' => 'mysql://guest@:/var/lib/mysql/mysql.sock/test', + //'dsn' => 'mysql://guest@localhost/test', + 'dsn' => 'pgsql://localhost/test', // Used by all DB types: - 'database' => 'wiki', + // prefix for filenames or table names /* * currently you MUST EDIT THE SQL file too (in the schemas/ directory * because we aren't doing on the fly sql generation during the * installation. */ - 'prefix' => 'phpwiki_', + //'prefix' => 'phpwiki_', - // Used by 'dbm', 'dba', 'file' + // Used by 'dba' 'directory' => "/tmp", - - // 'dbm' and 'dba create files named "$directory/${database}{$prefix}*". - // 'file' creates files named "$directory/${database}/{$prefix}*/*". - // The sql types use tables named "{$prefix}*" - - // Used by 'dbm', 'dba' + 'dba_handler' => 'gdbm', // Either of 'gdbm' or 'db2' work great for me. 
+ //'dba_handler' => 'db2', + //'dba_handler' => 'db3', // doesn't work at all for me.... 'timeout' => 20, - - // Used by *sql as neccesary to log in to server: - 'server' => 'localhost', - 'port' => '', - 'socket' => '', - 'user' => 'guest', - 'password' => '' + //'timeout' => 5 ); +///////////////////////////////////////////////////////////////////// +// +// The next section controls how many old revisions of each page +// are kept in the database. +// +// There are two basic classes of revisions: major and minor. +// Which class a revision belongs in is determined by whether the +// author checked the "this is a minor revision" checkbox when they +// saved the page. +// +// There is, additionally, a third class of revisions: author revisions. +// The most recent non-mergable revision from each distinct author is +// and author revision. +// +// The expiry parameters for each of those three classes of revisions +// can be adjusted seperately. For each class there are five +// parameters (usually, only two or three of the five are actually set) +// which control how long those revisions are kept in the database. +// +// max_keep: If set, this specifies an absolute maximum for the number +// of archived revisions of that class. This is meant to be +// used as a safety cap when a non-zero min_age is specified. +// It should be set relatively high, and it's purpose is to +// prevent malicious or accidental database overflow due +// to someone causing an unreasonable number of edits in a short +// period of time. +// +// min_age: Revisions younger than this (based upon the supplanted date) +// will be kept unless max_keep is exceeded. The age should +// be specified in days. It should be a non-negative, +// real number, +// +// min_keep: At least this many revisions will be kept. +// +// keep: No more than this many revisions will be kept. +// +// max_age: No revision older than this age will be kept. 
+// +// Supplanted date: Revisions are timestamped at the instant that they cease +// being the current revision. Revision age is computed using this timestamp, +// not the edit time of the page. +// +// Merging: When a minor revision is deleted, if the preceding revision is by +// the same author, the minor revision is merged with the preceding revision +// before it is deleted. Essentially: this replaces the content (and supplanted +// timestamp) of the previous revision with the content after the merged minor +// edit, the rest of the page metadata for the preceding version (summary, mtime, ...) +// is not changed. +// +// Keep up to 8 major edits, but keep them no longer than a month. +$ExpireParams['major'] = array('max_age' => 32, + 'keep' => 8); +// Keep up to 4 minor edits, but keep them no longer than a week. +$ExpireParams['minor'] = array('max_age' => 7, + 'keep' => 4); +// Keep the latest contributions of the last 8 authors up to a year. +// Additionally, (in the case of a particularly active page) try to keep the +// latest contributions of all authors in the last week (even if there are +// more than eight of them,) but in no case keep more than twenty unique +// author revisions. +$ExpireParams['author'] = array('max_age' => 365, + 'keep' => 8, + 'min_age' => 7, + 'max_keep' => 20); ///////////////////////////////////////////////////////////////////// // @@ -160,6 +224,10 @@ $LANG='C'; // index.php (this file) resides.) // CSS location +// +// Note that if you use the stock phpwiki style sheet, 'phpwiki.css', +// you should make sure that it's companion 'phpwiki-heavy.css' +// is installed in the same directory that the base style file is. 
define("CSS_URL", "phpwiki.css"); // logo image (path relative to index.php) @@ -175,6 +243,7 @@ $logo = "images/wikibase.png"; $datetimeformat = "%B %e, %Y"; // may contain time of day $dateformat = "%B %e, %Y"; // must not contain time +// FIXME: delete // this defines how many page names to list when displaying // the MostPopular pages; the default is to show the 20 most popular pages define("MOST_POPULAR_LIST_LENGTH", 20); @@ -197,6 +266,7 @@ $templates = array("BROWSE" => "templates/browse.html", */ define('WIKI_PGSRC', "pgsrc"); // Default (old) behavior. //define('WIKI_PGSRC', 'wiki.zip'); // New style. +//define('WIKI_PGSRC', '../../../Logs/Hamwiki/hamwiki-20010830.zip'); // New style. // DEFAULT_WIKI_PGSRC is only used when the language is *not* // the default (English) and when reading from a directory: @@ -313,9 +383,12 @@ define('INTERWIKI_MAP_FILE', "lib/interwiki.map"); include "lib/main.php"; -// For emacs users +// (c-file-style: "gnu") // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/ArchiveCleaner.php b/lib/ArchiveCleaner.php new file mode 100644 index 000000000..96f4eaf11 --- /dev/null +++ b/lib/ArchiveCleaner.php @@ -0,0 +1,141 @@ +expire_params = $expire_params; + } + + function isMergeable($revision) { + if ( ! 
$revision->get('is_minor_edit') ) + return false; + + $page = $revision->getPage(); + $author_id = $revision->get('author_id'); + + $previous = $page->getRevisionBefore($revision); + + return !empty($author_id) + && $author_id == $previous->get('author_id'); + } + + function cleanDatabase($dbi) { + $iter = $dbi->getAllPages(); + while ($page = $iter->next()) + $this->cleanPageRevisions($page); + } + + function cleanPageRevisions($page) { + + $expire = &$this->expire_params; + foreach (array('major', 'minor', 'author') as $class) + $counter[$class] = new ArchiveCleaner_Counter($expire[$class]); + + $authors_seen = array(); + + $current = $page->getCurrentRevision(); + + for ( $revision = $page->getRevisionBefore($current); + $revision->getVersion() > 0; + $revision = $page->getRevisionBefore($revision) ) { + + if ($revision->get('is_minor_edit')) + $keep = $counter['minor']->keep($revision); + else + $keep = $counter['major']->keep($revision); + + if ($this->isMergeable($revision)) { + if (!$keep) { + $page->mergeRevision($revision); + } + } + else { + $author_id = $revision->get('author_id'); + if (empty($authors_seen[$author_id])) { + if ($counter['author']->keep($revision)) + $keep = true; + $authors_seen[$author_id] = true; + } + if (!$keep) { + $page->deleteRevision($revision); + } + } + } + } +} + +/** + * @access private + */ +class ArchiveCleaner_Counter +{ + function ArchiveCleaner_Counter($params) { + + extract($params); + $INFINITY = 0x7fffffff; + + $this->max_keep = isset($max_keep) ? $max_keep : $INFINITY; + + $this->min_age = isset($min_age) ? $min_age : 0; + $this->min_keep = isset($min_keep) ? $min_keep : 0; + + $this->max_age = isset($max_age) ? $max_age : $INFINITY; + $this->keep = isset($keep) ? 
$keep : $INFINITY; + + if ($this->keep > $this->max_keep) + $this->keep = $this->max_keep; + if ($this->min_keep > $this->keep) + $this->min_keep = $this->keep; + + if ($this->min_age > $this->max_age) + $this->min_age = $this->max_age; + + $this->now = time(); + $this->count = 0; + $this->previous_supplanted = false; + + } + + function computeAge($revision) { + $supplanted = $revision->get('_supplanted'); + + if (!$supplanted) { + // Every revision but the most recent should have a supplanted time. + // However, if it doesn't... + assert($supplanted > 0); + // Assuming revisions are chronologically ordered, the previous + // supplanted time is a good value to use... + if ($this->previous_supplanted > 0) + $supplanted = $this->previous_supplanted; + else { + // no supplanted timestamp. + // don't delete this revision based on age. + return 0; + } + } + + $this->last_supplanted = $supplanted; + return ($this->now - $supplanted) / (24 * 3600); + } + + function keep($revision) { + $count = ++$this->count; + $age = $this->computeAge($revision); + + if ($count > $this->max_keep) + return false; + if ($age <= $this->min_age || $count <= $this->min_keep) + return true; + return $age <= $this->max_age && $count <= $this->keep; + } +} + + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> \ No newline at end of file diff --git a/lib/DbaDatabase.php b/lib/DbaDatabase.php new file mode 100644 index 000000000..6422028ce --- /dev/null +++ b/lib/DbaDatabase.php @@ -0,0 +1,154 @@ +_file = $filename; + $this->_handler = $handler; + $this->_timeout = DBA_DATABASE_DEFAULT_TIMEOUT; + $this->_dbh = false; + if ($mode) + $this->open($mode); + } + + function set_timeout($timeout) { + $this->_timeout = $timeout; + } + + function open($mode = 'w') { + if ($this->_dbh) + return; // already open. 
+ + $watchdog = $this->_timeout; + + global $ErrorManager; + $this->_dba_open_error = false; + $ErrorManager->pushErrorHandler(array($this, '_dba_open_error_handler')); + while (($dbh = dba_open($this->_file, $mode, $this->_handler)) < 1) { + if (--$watchdog <= 0) + break; + flush(); + sleep(1); + } + $ErrorManager->popErrorHandler(); + + if (!$dbh) { + if ( ($error = $this->_dba_open_error) ) { + $error->errno = E_USER_ERROR; + $ErrorManager->handleError($error); + } + else { + trigger_error("dba_open failed", E_USER_ERROR); + } + } + $this->_dbh = $dbh; + return !empty($dbh); + } + + function close() { + if ($this->_dbh) + dba_close($this->_dbh); + $this->_dbh = false; + } + + function exists($key) { + return dba_exists($key, $this->_dbh); + } + + function fetch($key) { + $val = dba_fetch($key, $this->_dbh); + if ($val === false) + return $this->_error("fetch($key)"); + return $val; + } + + function insert($key, $val) { + if (!dba_insert($key, $val, $this->_dbh)) + return $this->_error("insert($key)"); + } + + function replace($key, $val) { + if (!dba_replace($key, $val, $this->_dbh)) + return $this->_error("replace($key)"); + } + + + function firstkey() { + return dba_firstkey($this->_dbh); + } + + function nextkey() { + return dba_nextkey($this->_dbh); + } + + function delete($key) { + if (!dba_delete($key, $this->_dbh)) + return $this->_error("delete($key)"); + } + + function get($key) { + return dba_fetch($key, $this->_dbh); + } + + function set($key, $val) { + $dbh = &$this->_dbh; + if (dba_exists($key, $dbh)) { + if ($val !== false) { + if (!dba_replace($key, $val, $dbh)) + return $this->_error("store[replace]($key)"); + } + else { + if (!dba_delete($key, $dbh)) + return $this->_error("store[delete]($key)"); + } + } + else { + if (!dba_insert($key, $val, $this->_dbh)) + return $this->_error("store[insert]($key)"); + } + } + + function sync() { + if (!dba_sync($this->_dbh)) + return $this->_error("sync()"); + } + + function optimize() { + if 
(!dba_optimize($this->_dbh)) + return $this->_error("optimize()"); + } + + function _error($mes) { + trigger_error("DbaDatabase: $mes", E_USER_WARNING); + return false; + + trigger_error("$this->_file: dba error: $mes", E_USER_ERROR); + } + + function _dump() { + $dbh = &$this->_dbh; + for ($key = $this->firstkey($dbh); $key; $key = $this->nextkey($dbh)) + printf("%10s: %s\n", $key, $this->fetch($key)); + } + + function _dba_open_error_handler ($error) { + $this->_dba_open_error = $error; + return true; + } +} + + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/DbaListSet.php b/lib/DbaListSet.php new file mode 100644 index 000000000..ba45cd284 --- /dev/null +++ b/lib/DbaListSet.php @@ -0,0 +1,142 @@ +_dbh = &$dbh; + } + + function create_sequence($seq) { + $dbh = &$this->_dbh; + + if (!$dbh->exists('max_key')) { + // echo "initializing DbaListSet"; + // FIXME: check to see if it's really empty? + $dbh->insert('max_key', 0); + } + + $key = "s" . urlencode($seq); + assert(intval($key) == 0 && !strstr($key, ':')); + if (!$dbh->exists($key)) + $dbh->insert($key, "$key:$key:"); + } + + function delete_sequence($seq) { + $key = "s" . urlencode($seq); + for ($i = $this->firstkey($seq); $i; $i = $next) { + $next = $this->next($i); + $this->delete($i); + } + $this->_dbh->delete($key); + } + + function firstkey($seq) { + $key = "s" . urlencode($seq); + list(, $next) = explode(':', $this->_dbh->fetch($key), 3); + return intval($next); + } + + function lastkey($seq) { + $key = "s" . 
urlencode($seq); + list($prev) = explode(':', $this->_dbh->fetch($key), 3); + return intval($prev); + } + + + function next($i) { + list( , $next, ) = explode(':', $this->_dbh->fetch(intval($i)), 3); + return intval($next); + } + + function prev(&$i) { + list( $prev , , ) = explode(':', $this->_dbh->fetch(intval($i)), 3); + return intval($prev); + } + + function exists($i) { + $i = intval($i); + return $i && $this->_dbh->exists($i); + } + + function fetch($i) { + list(, , $data) = explode(':', $this->_dbh->fetch(intval($i)), 3); + return $data; + } + + function replace($i, $data) { + $dbh = &$this->_dbh; + list($prev, $next,) = explode(':', $dbh->fetch(intval($i)), 3); + $dbh->replace($i, "$prev:$next:$data"); + } + + function insert_before($i, $data) { + assert(intval($i)); + return $this->_insert_before_nc($i, $data); + } + + function insert_after($i, $data) { + assert(intval($i)); + return $this->_insert_after_nc($i, $data); + } + + function append($seq, $data) { + $key = "s" . urlencode($seq); + $this->_insert_before_nc($key, $data); + } + + function prepend($seq, $data) { + $key = "s" . 
urlencode($seq); + $this->_insert_after_nc($key, $data); + } + + function _insert_before_nc($i, &$data) { + $newkey = $this->_new_key(); + $old_prev = $this->_setprev($i, $newkey); + $this->_setnext($old_prev, $newkey); + $this->_dbh->insert($newkey, "$old_prev:$i:$data"); + return $newkey; + } + + function _insert_after_nc($i, &$data) { + $newkey = $this->_new_key(); + $old_next = $this->_setnext($i, $newkey); + $this->_setprev($old_next, $newkey); + $this->_dbh->insert($newkey, "$i:$old_next:$data"); + return $newkey; + } + + function delete($i) { + $dbh = &$this->_dbh; + list($prev, $next) = explode(':', $dbh->fetch(intval($i)), 3); + $this->_setnext($prev, $next); + $this->_setprev($next, $prev); + $dbh->delete(intval($i)); + } + + function _new_key() { + $dbh = &$this->_dbh; + $new_key = $dbh->fetch('max_key') + 1; + $dbh->replace('max_key', $new_key); + return $new_key; + } + + function _setprev($i, $new_prev) { + $dbh = &$this->_dbh; + list($old_prev, $next, $data) = explode(':', $dbh->fetch($i), 3); + $dbh->replace($i, "$new_prev:$next:$data"); + return $old_prev; + } + + function _setnext($i, $new_next) { + $dbh = &$this->_dbh; + list($prev, $old_next, $data) = explode(':', $dbh->fetch($i), 3); + $dbh->replace($i, "$prev:$new_next:$data"); + return $old_next; + } +} + + +// Local Variables: +// mode: php +// End: +?> diff --git a/lib/DbaPartition.php b/lib/DbaPartition.php new file mode 100644 index 000000000..9fed3d618 --- /dev/null +++ b/lib/DbaPartition.php @@ -0,0 +1,85 @@ +_h = &$dbm; + $this->_p = $prefix; + } + + function open($mode = 'w') { + $this->_h->open(); + } + + function close() { + $this->_h->close(); + } + + function firstkey() { + $dbh = &$this->_h; + $prefix = &$this->_p; + $n = strlen($prefix); + for ($key = $dbh->firstkey(); $key !== false; $key = $dbh->nextkey()) { + if (substr($key, 0, $n) == $prefix) + return (string) substr($key, $n); + } + return false; + } + + function nextkey() { + $dbh = &$this->_h; + $prefix = &$this->_p; + $n 
= strlen($prefix); + for ($key = $dbh->nextkey(); $key !== false; $key = $dbh->nextkey()) { + if (substr($key, 0, $n) == $prefix) + return (string) substr($key, $n); + } + return false; + } + + function exists($key) { + return $this->_h->exists($this->_p . $key); + } + + function fetch($key) { + return $this->_h->fetch($this->_p . $key); + } + + function insert($key, $val) { + return $this->_h->insert($this->_p . $key, $val); + } + + function replace($key, $val) { + return $this->_h->replace($this->_p . $key, $val); + } + + function delete($key) { + return $this->_h->delete($this->_p . $key); + } + + function get($key) { + return $this->_h->get($this->_p . $key); + } + + function set($key, $val) { + return $this->_h->set($this->_p . $key, $val); + } + + function sync() { + return $this->_h->sync(); + } + + function optimize() { + return $this->_h->optimize(); + } +} + + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/ErrorManager.php b/lib/ErrorManager.php new file mode 100644 index 000000000..bdab55b92 --- /dev/null +++ b/lib/ErrorManager.php @@ -0,0 +1,357 @@ +_handlers = array(); + $this->_fatal_handler = false; + $this->_postpone_mask = 0; + $this->_postponed_errors = array(); + + set_error_handler('ErrorManager_errorHandler'); + } + + /** + * Get mask indicating which errors are currently being postponed. + * @access public + * @return int The current postponed error mask. + */ + function getPostponedErrorMask() { + return $this->_postpone_mask; + } + + /** + * Set mask indicating which errors to postpone. + * + * The default value of the postpone mask is zero (no errors postponed.) + * + * When you set this mask, any queue errors which do not match tne new + * mask are reported. + * + * @access public + * @param $newmask int The new value for the mask. 
+ */ + function setPostponedErrorMask($newmask) { + $this->_postpone_mask = $newmask; + $this->_flush_errors($newmask); + } + + /** + * Report any queued error messages. + * @access public + */ + function flushPostponedErrors() { + $this->_flush_errors(); + } + + /** + * Push a custom error handler on the handler stack. + * + * Sometimes one is performing an operation where one expects certain errors + * or warnings. In this case, one might not want these errors reported + * in the normal manner. Installing a custom error handler via this method + * allows one to intercept such errors. + * + * An error handler installed via this method should be either a + * function or an object method taking one argument: a PhpError object. + * + * The error handler should return either: + *
+ *
False
If it has not handled the error. In this case, error + * processing will proceed as if the handler had never been called: + * the error will be passed to the next handler in the stack, + * or the default handler, if there are no more handlers in the stack. + *
True
If the handler has handled the error. If the error was + * a non-fatal one, no further processing will be done. + * If it was a fatal error, the ErrorManager will still + * terminate the PHP process (see setFatalHandler.) + *
A PhpError object + *
The error is not considered + * handled, and will be passed on to the next handler(s) in the stack + * (or the default handler). + * The returned PhpError need not be the same as the one passed to the + * handler. This allows the handler to "adjust" the error message. + * + * @access public + * @param $handler string or array + * To register a global function as a handler, just pass the functions name + * (as a string). To register an object method as a handler, pass a array: + * the first element is the object, the second is the name of the method. + */ + function pushErrorHandler($handler) { + array_unshift($this->_handlers, $handler); + } + + /** + * Pop an error handler off the handler stack. + * @access public + */ + function popErrorHandler() { + return array_shift($this->_handlers); + } + + /** + * Set a termination handler. + * + * This handler will be called upon fatal errors. The handler gets passed + * one argument: a PhpError object describing the fatal error. + * + * @access public + * @param $handler string or array + * To register a global function as a handler, just pass the functions name + * (as a string). To register an object method as a handler, pass a array: + * the first element is the object, the second is the name of the method. + */ + function setFatalHandler($handler) { + $this->_fatal_handler = $handler; + } + + function _callHandler($handler, $error) { + if (is_string($handler)) { + return call_user_func($handler, $error); + } + else if (is_array($handler)) { + list($object, $method) = $handler; + if (method_exists($object, $method)) + return call_user_method($method, $object, $error); + } + echo "
ErrorManager::_callHandler: BAD HANDLER
\n"; + return false; + } + + /** + * Handle an error. + * + * The error is passed through any registered error handlers, + * and then either reported or postponed. + * + * @access public + * @param $error object A PhpError object. + */ + function handleError($error) { + static $in_handler; + + if (!empty($in_handler)) { + echo "

ErrorManager: error while handling error:

\n"; + echo $error->printError(); + return; + } + $in_handler = true; + + foreach ($this->_handlers as $handler) { + $result = $this->_callHandler($handler, $error); + if (!$result) { + continue; // Handler did not handle error. + } + elseif (is_object($result)) { + // handler filtered the result. Still should pass to the + // rest of the chain. + if ($error->isFatal()) { + // Don't let handlers make fatal errors non-fatal. + $result->errno = $error->errno; + } + $error = $result; + } + else { + // Handler handled error. + if (!$error->isFatal()) { + $in_handler = false; + return; + } + break; + } + } + + // Error was either fatal, or was not handled by a handler. + // Handle it ourself. + if ($error->isFatal()) { + $this->_die($error); + } + else if (($error->errno & error_reporting()) != 0) { + if (($error->errno & $this->_postpone_mask) != 0) { + $this->_postponed_errors[] = $error; + } + else { + $error->printError(); + } + } + $in_handler = false; + } + + /** + * @access private + */ + function _die($error) { + $error->printError(); + $this->_flush_errors(); + if ($this->_fatal_handler) + $this->_callHandler($this->_fatal_handler, $error); + exit -1; + } + + /** + * @access private + */ + function _flush_errors($keep_mask = 0) { + $errors = &$this->_postponed_errors; + foreach ($errors as $key => $error) { + if (($error->errno & $keep_mask) != 0) + continue; + unset($errors[$key]); + $error->printError(); + } + } +} + +/** + * Global error handler for class ErrorManager. + * + * This is necessary since PHP's set_error_handler() does not allow one to + * set an object method as a handler. + * + * @access private + */ +function ErrorManager_errorHandler($errno, $errstr, $errfile, $errline) +{ + global $ErrorManager; + $error = new PhpError($errno, $errstr, $errfile, $errline); + $ErrorManager->handleError($error); +} + + +/** + * A class representing a PHP error report. 
+ * + * @see The PHP documentation for set_error_handler at + * http://php.net/manual/en/function.set-error-handler.php . + */ +class PhpError { + /** + * The PHP errno + */ + var $errno; + + /** + * The PHP error message. + */ + var $errstr; + + /** + * The source file where the error occurred. + */ + var $errfile; + + /** + * The line number (in $this->errfile) where the error occured. + */ + var $errline; + + /** + * Construct a new PhpError. + * @param $errno int + * @param $errstr string + * @param $errfile string + * @param $errline int + */ + function PhpError($errno, $errstr, $errfile, $errline) { + $this->errno = $errno; + $this->errstr = $errstr; + $this->errfile = $errfile; + $this->errline = $errline; + } + + /** + * Determine whether this is a fatal error. + * @return boolean True if this is a fatal error. + */ + function isFatal() { + return ($this->errno & (EM_WARNING_ERRORS|EM_NOTICE_ERRORS)) == 0; + } + + /** + * Determine whether this is a warning level error. + * @return boolean + */ + function isWarning() { + return ($this->errno & EM_WARNING_ERRORS) != 0; + } + + /** + * Determine whether this is a notice level error. + * @return boolean + */ + function isNotice() { + return ($this->errno & EM_NOTICE_ERRORS) != 0; + } + + /** + * Get a printable, HTML, message detailing this error. + * @return string The detailed error message. + */ + function getDetail() { + if ($this->isNotice()) + $what = 'Notice'; + else if ($this->isWarning()) + $what = 'Warning'; + else + $what = 'Fatal'; + + $errfile = ereg_replace('^' . getcwd() . '/', '', $this->errfile); + + $lines = explode("\n", $this->errstr); + $errstr = htmlspecialchars(array_shift($lines)); + foreach ($lines as $key => $line) + $lines[$key] = "
  • " . htmlspecialchars($line) . "<\li>"; + if ($lines) + $errstr .= "
      \n" . join("\n", $lines) . "\n
    "; + + return sprintf("

    %s:%d: %s[%d]: %s

    \n", + htmlspecialchars($errfile), + $this->errline, $what, $this->errno, + $errstr); + } + + /** + * Print an HTMLified version of this error. + * @see getDetail + */ + function printError() { + echo $this->getDetail(); + } +} + +if (!isset($GLOBALS['ErrorManager'])) { + $GLOBALS['ErrorManager'] = new ErrorManager; +} + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/PhpWikiDatabase.php b/lib/PhpWikiDatabase.php deleted file mode 100644 index 19f0ad721..000000000 --- a/lib/PhpWikiDatabase.php +++ /dev/null @@ -1,50 +0,0 @@ -_fix_magic_quotes_gpc(); + + switch($this->get('REQUEST_METHOD')) { + case 'GET': + case 'HEAD': + $this->args = &$GLOBALS['HTTP_GET_VARS']; + break; + case 'POST': + $this->args = &$GLOBALS['HTTP_POST_VARS']; + break; + default: + $this->args = array(); + break; + } + + $this->session = new Request_SessionVars; + $this->cookies = new Request_CookieVars; + + $this->_log_entry = new Request_AccessLogEntry($this); + + $TheRequest = $this; + } + + function get($key) { + $vars = &$GLOBALS['HTTP_SERVER_VARS']; + + if (isset($vars[$key])) + return $vars[$key]; + + switch ($key) { + case 'REMOTE_HOST': + $addr = $vars['REMOTE_ADDR']; + if (defined('ENABLE_REVERSE_DNS') && ENABLE_REVERSE_DNS) + return $vars[$key] = gethostbyaddr($addr); + else + return $addr; + default: + return false; + } + } + + function getArg($key) { + if (isset($this->args[$key])) + return $this->args[$key]; + return false; + } + + function setArg($key, $val) { + $this->args[$key] = $val; + } + + + function redirect($url) { + header("Location: $url"); + $this->_log_entry->setStatus(302); + } + + function compress_output() { + if (function_exists('ob_gzhandler')) { + ob_start('ob_gzhandler'); + $this->_is_compressing_output = true; + } + } + + function finish() { + if (!empty($this->_is_compressing_output)) + ob_end_flush(); + } + + + 
function getSessionVar($key) { + return $this->session->get($key); + } + function setSessionVar($key, $val) { + return $this->session->set($key, $val); + } + function deleteSessionVar($key) { + return $this->session->delete($key); + } + + function getCookieVar($key) { + return $this->cookies->get($key); + } + function setCookieVar($key, $val, $lifetime_in_days = false) { + return $this->cookies->set($key, $val, $lifetime_in_days); + } + function deleteCookieVar($key) { + return $this->cookies->delete($key); + } + + function getUploadedFile($key) { + return Request_UploadedFile::getUploadedFile($key); + } + + + function _fix_magic_quotes_gpc() { + $needs_fix = array('HTTP_POST_VARS', + 'HTTP_GET_VARS', + 'HTTP_COOKIE_VARS', + 'HTTP_SERVER_VARS', + 'HTTP_POST_FILES'); + + // Fix magic quotes. + if (get_magic_quotes_gpc()) { + foreach ($needs_fix as $vars) + $this->_stripslashes($GLOBALS[$vars]); + } + } + + function _stripslashes(&$var) { + if (is_array($var)) { + foreach ($var as $key => $val) + $this->_stripslashes($var[$key]); + } + elseif (is_string($var)) + $var = stripslashes($var); + } +} + +class Request_SessionVars { + function Request_SessionVars() { + session_start(); + } + + function get($key) { + $vars = &$GLOBALS['HTTP_SESSION_VARS']; + if (isset($vars[$key])) + return $vars[$key]; + return false; + } + + function set($key, $val) { + $vars = &$GLOBALS['HTTP_SESSION_VARS']; + if (ini_get('register_globals')) { + // This is funky but necessary, at least in some PHP's + $GLOBALS[$key] = $val; + } + $vars[$key] = $val; + session_register($key); + } + + function delete($key) { + $vars = &$GLOBALS['HTTP_SESSION_VARS']; + if (ini_get('register_globals')) + unset($GLOBALS[$key]); + unset($vars[$key]); + session_unregister($key); + } +} + +class Request_CookieVars { + + function get($key) { + $vars = &$GLOBALS['HTTP_COOKIE_VARS']; + if (isset($vars[$key])) { + @$val = unserialize($vars[$key]); + if (!empty($val)) + return $val; + } + return false; + } + + 
function set($key, $val, $persist_days = false) { + $vars = &$GLOBALS['HTTP_COOKIE_VARS']; + + if (is_numeric($persist_days)) { + $expires = time() + (24 * 3600) * $persist_days; + } + else { + $expires = 0; + } + + $packedval = serialize($val); + $vars[$key] = $packedval; + setcookie($key, $packedval, $expires, '/'); + } + + function delete($key) { + $vars = &$GLOBALS['HTTP_COOKIE_VARS']; + setcookie($key); + unset($vars[$key]); + } +} + +class Request_UploadedFile { + function getUploadedFile($postname) { + global $HTTP_POST_FILES; + + if (!isset($HTTP_POST_FILES[$postname])) + return false; + + $fileinfo = &$HTTP_POST_FILES[$postname]; + if (!is_uploaded_file($fileinfo['temp_name'])) + return false; // possible malicious attack. + + return new Request_UploadedFile($fileinfo); + } + + function Request_UploadedFile($fileinfo) { + $this->_info = $fileinfo; + } + + function getSize() { + return $this->_info['size']; + } + + function getName() { + return $this->_info['name']; + } + + function getType() { + return $this->_info['type']; + } + + function open() { + if ( ($fd = fopen($this->_info['tmp_name'], "rb")) ) { + // Dump http headers. + while ( ($header = fgets($fd, 4096)) ) + if (trim($header) == '') + break; + } + return $fd; + } + + function getContents() { + $fd = $this->open(); + $data = fread($fd, $this->getSize()); + fclose($fd); + return $data; + } +} + +class Request_AccessLogEntry +{ + function AccessLogEntry ($request) { + $this->host = $req->get('REMOTE_HOST'); + $this->ident = $req->get('REMOTE_IDENT'); + if (!$this->ident) + $this->ident = '-'; + $this->user = '-'; + $this->time = time(); + $this->request = join(' ', array($req->get('REQUEST_METHOD'), + $req->get('REQUEST_URI'), + $req->get('SERVER_PROTOCOL'))); + $this->status = 200; + $this->size = 0; + $this->referer = (string) $req->get('HTTP_REFERER'); + $this->user_agent = (string) $req->get('HTTP_USER_AGENT'); + } + + // + // Returns zone offset, like "-0800" for PST. 
+ // + function _zone_offset () { + $offset = date("Z", $this->time); + if ($offset < 0) + { + $negoffset = "-"; + $offset = -$offset; + } + $offhours = floor($offset / 3600); + $offmins = $offset / 60 - $offhours * 60; + return sprintf("%s%02d%02d", $negoffset, $offhours, $offmins); + } + + // Format time into NCSA format. + function _ncsa_time($time = false) { + if (!$time) + $time = time(); + + return date("d/M/Y:H:i:s", $time) . + " " . $this->_zone_offset(); + } + + function write($logfile) { + $entry = sprintf('%s %s %s [%s] "%s" %d %d "%s" "%s"', + $this->host, $this->ident, $this->user, + $this->_ncsa_time($this->time), + $this->request, $this->status, $this->size, + $this->referer, $this->user_agent); + + //Error log doesn't provide locking. + //error_log("$entry\n", 3, $logfile); + + // Alternate method + if (($fp = fopen($logfile, "a"))) + { + flock($fp, LOCK_EX); + fputs($fp, "$entry\n"); + fclose($fp); + } + } +} + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/Template.php b/lib/Template.php new file mode 100644 index 000000000..70d097775 --- /dev/null +++ b/lib/Template.php @@ -0,0 +1,306 @@ +_tmpl = $this->_munge_input($tmpl); + $this->_tmpl = $tmpl; + $this->_vars = array(); + } + + function _munge_input($template) { + // Expand "< ?plugin-* ...? >" + preg_match_all('/<\?plugin.*?\?>/s', $template, $m); + global $dbi, $request; // FIXME: no globals? + $pluginLoader = new WikiPluginLoader; + foreach (array_unique($m[0]) as $plugin_pi) { + $orig[] = '/' . preg_quote($plugin_pi, '/') . '/s'; + $repl[] = $pluginLoader->expandPI($plugin_pi, $dbi, $request); + } + + // Convert ${VAR} to < ?php echo "$VAR"; ? > + //$orig[] = '/\${(\w[\w\d]*)}/'; + //$repl[] = ''; + $orig[] = '/\${(\w[\w\d]*)}/e'; + $repl[] = '$this->_getReplacement("\1")'; + + // Convert $VAR[ind] to < ?php echo "$VAR[ind]"; ? 
> + $orig[] = '/\$(\w[\w\d]*)\[([\w\d]+)\]/e'; + $repl[] = '$this->_getReplacement("\1", "\2")'; + + return preg_replace($orig, $repl, $template); + } + + function _getReplacement($varname, $index = false) { + // FIXME: report missing vars. + $vars = &$this->_vars; + if (isset($vars[$varname])) { + $value = $vars[$varname]; + if ($index !== false) + @$value = (string) $value[$index]; + return str_replace('?', '?', $value); + } + return false; + } + + /** + * Substitute HTML replacement text for tokens in template. + * + * Constructs a new WikiTemplate based upon the named template. + * + * @access public + * + * @param $token string Name of token to substitute for. + * + * @param $replacement string Replacement HTML text. + */ + function replace($varname, $value) { + $this->_vars[$varname] = $value; + } + + /** + * Substitute text for tokens in template. + * + * @access public + * + * @param $token string Name of token to substitute for. + * + * @param $replacement string Replacement text. + * The replacement text is run through htmlspecialchars() + * to escape any special characters. + */ + function qreplace($varname, $value) { + $this->_vars[$varname] = htmlspecialchars($value); + } + + + /** + * Include/remove conditional text in template. + * + * @access public + * + * @param $token string Conditional token name. + * The text within any matching if blocks (or single line ifs) will + * be included in the template expansion, while the text in matching + * negated if blocks will be excluded. 
+ */ + /* + function setConditional($token, $value = true) { + $this->_iftoken[$token] = $value; + } + */ + + function getExpansion($varhash = false) { + $savevars = $this->_vars; + if (is_array($varhash)) { + foreach ($varhash as $key => $val) + $this->_vars[$key] = $val; + } + extract($this->_vars); + if (isset($this->_iftoken)) + $_iftoken = $this->_iftoken; + + ob_start(); + + //$this->_dump_template(); + + global $ErrorManager; + $ErrorManager->pushErrorHandler(array($this, '_errorHandler')); + eval('?>' . $this->_munge_input($this->_tmpl)); + $ErrorManager->popErrorHandler(); + + $html = ob_get_contents(); + ob_end_clean(); + + return $html; + } + + function printExpansion($args = false) { + echo $this->getExpansion($args); + } + + // Debugging: + function _dump_template () { + $lines = explode("\n", $this->_munge_input($this->_tmpl)); + echo "
    \n";
    +        $n = 1;
    +        foreach ($lines as $line)
    +            printf("%4d  %s\n", $n++, htmlspecialchars($line));
    +        echo "
    \n"; + } + + function _errorHandler($error) { + if (!preg_match('/: eval\(\)\'d code$/', $error->errfile)) + return false; + $error->errfile = "In template"; + $lines = explode("\n", $this->_tmpl); + if (isset($lines[$error->errline - 1])) + $error->errstr .= ":\n\t" . $lines[$error->errline - 1]; + return $error; + } +}; + +class TemplateFile +extends Template +{ + function TemplateFile($filename) { + $this->_template_file = $filename; + $fp = fopen($filename, "rb"); + $data = fread($fp, filesize($filename)); + fclose($fp); + $this->Template($data); + } +} + +class WikiTemplate +extends TemplateFile +{ + /** + * Constructor. + * + * Constructs a new WikiTemplate based upon the named template. + * + * @access public + * + * @param $template string Which template. + */ + function WikiTemplate($template, $page_revision = false) { + global $templates; + + $this->TemplateFile(FindLocalizedFile($templates[$template])); + $this->_template_name = $template; + + $this->setGlobalTokens(); + if ($page_revision) + $this->setPageRevisionTokens($page_revision); + } + + + function setPageTokens(&$page) { + /* + if ($page->get('locked')) + $this->setConditional('LOCK'); + // HACK: note that EDITABLE may also be set in setWikiUserTokens. + if (!$page->get('locked')) + $this->setConditional('EDITABLE'); + */ + + $pagename = $page->getName(); + $this->replace('page', $page); + $this->qreplace('PAGE', $pagename); + $this->qreplace('PAGEURL', rawurlencode($pagename)); + $this->qreplace('SPLIT_PAGE', split_pagename($pagename)); + $this->qreplace('BROWSE_PAGE', WikiURL($pagename)); + + // FIXME: this is a bit of dangerous hackage. + $this->qreplace('ACTION', WikiURL($pagename, array('action' => ''))); + + // FIXME:? 
+ //$this->replace_callback('HITS', array($page, 'getHitCount')); + //$this->replace_callback('RELATEDPAGES', array($page, 'getHitCount')); + //_dotoken('RELATEDPAGES', LinkRelatedPages($dbi, $name), $page); + } + + function setPageRevisionTokens(&$revision) { + $page = & $revision->getPage(); + + $current = & $page->getCurrentRevision(); + $previous = & $page->getRevisionBefore($revision->getVersion()); + + $this->replace('IS_CURRENT', + $current->getVersion() == $revision->getVersion()); + + /* + if ($previous && $previous->getVersion() != 0) + $this->setConditional('COPY'); // FIXME: should rename HAVE_COPY? + */ + + global $datetimeformat; + + $this->qreplace('LASTMODIFIED', + strftime($datetimeformat, $revision->get('mtime'))); + $this->qreplace('LASTAUTHOR', $revision->get('author')); + $this->qreplace('VERSION', $revision->getVersion()); + $this->qreplace('CURRENT_VERSION', $current->getVersion()); + + $this->setPageTokens($page); + } + + function setWikiUserTokens(&$user) { + /* + if ( $user->is_admin() ) { + $this->setConditional('ADMIN'); + $this->setConditional('EDITABLE'); + } + if ( ! $user->is_authenticated() ) + $this->setConditional('ANONYMOUS'); + */ + $this->replace('user', $user); + $this->qreplace('USERID', $user->id()); + $prefs = $user->getPreferences(); + $this->qreplace('EDIT_AREA_WIDTH', $prefs['edit_area.width']); + $this->qreplace('EDIT_AREA_HEIGHT', $prefs['edit_area.height']); + } + + function setGlobalTokens () { + global $user, $logo, $RCS_IDS; + + // FIXME: This a a bit of dangerous hackage. + $this->qreplace('BROWSE', WikiURL('')); + $this->qreplace('CSS_URL', DataURL(CSS_URL)); + + if (isset($user)) + $this->setWikiUserTokens($user); + if (isset($logo)) + $this->qreplace('LOGO', DataURL($logo)); + if (isset($RCS_IDS)) + $this->qreplace('RCS_IDS', $RCS_IDS); + + $this->qreplace('BASE_URL', + // FIXME: + WikiURL($GLOBALS['pagename'], false, 'absolute_url')); + } +}; + + +/** + * Generate page contents using a template. 
+ * + * This is a convenience function for backwards compatibility with the old + * GeneratePage(). + * + * @param $template string name of the template (see config.php for list of names) + * + * @param $content string html content to put into the page + * + * @param $title string page title + * + * @param $page_revision object Current WikiDB_PageRevision, if available. + * + * @return string HTML expansion of template. + */ +function GeneratePage($template, $content, $title, $page_revision = false) { + // require_once("lib/template.php"); + $t = new WikiTemplate($template); + if ($page_revision) + $t->setPageRevisionTokens($page_revision); + $t->replace('CONTENT', $content); + $t->replace('TITLE', $title); + return $t->getExpansion(); +} + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil33 +// End: +?> \ No newline at end of file diff --git a/lib/TextSearchQuery.php b/lib/TextSearchQuery.php new file mode 100644 index 000000000..589391b34 --- /dev/null +++ b/lib/TextSearchQuery.php @@ -0,0 +1,610 @@ + + *
    wiki -test + *
    Match strings containing the substring 'wiki', and not containing the + * substring 'test'. + *
    wiki word or page + *
    Match strings containing the substring 'wiki' and either the substring + * 'word' or the substring 'page'. + *
  • + * + * The full query syntax, in order of precedence, is roughly: + * + * The unary 'NOT' or '-' operator (they are equivalent) negates the + * following search clause. + * + * Search clauses may be joined with the (left-associative) binary operators + * 'AND' and 'OR'. + * + * Two adjoining search clauses are joined with an implicit 'AND'. This has + * lower precedence than either an explicit 'AND' or 'OR', so "a b OR c" + * parses as "a AND ( b OR c )", while "a AND b OR c" parses as + * "( a AND b ) OR c" (due to the left-associativity of 'AND' and 'OR'.) + * + * Search clauses can be grouped with parentheses. + * + * Phrases (or other things which don't look like words) can be forced to + * be interpreted as words by quoting them, either with single (') or double (") + * quotes. If you wan't to include the quote character within a quoted string, + * double-up on the quote character: 'I''m hungry' is equivalent to + * "I'm hungry". + */ +class TextSearchQuery { + /** + * Create a new query. + * + * @param $search_query string The query. Syntax is as described above. + * Note that an empty $search_query will match anything. + * @see TextSearchQuery + */ + function TextSearchQuery($search_query) { + $parser = new TextSearchQuery_Parser; + $this->_tree = $parser->parse($search_query); + $this->_optimize(); + } + + function _optimize() { + $this->_tree = $this->_tree->optimize(); + } + + /** + * Get a regexp which matches the query. + */ + function asRegexp() { + if (!isset($this->_regexp)) + $this->_regexp = '/^' . $this->_tree->regexp() . '/isS'; + return $this->_regexp; + } + + /** + * Match query against string. + * + * @param $string string The string to match. + * @return boolean True if the string matches the query. + */ + function match($string) { + return preg_match($this->asRegexp(), $string); + } + + + /** + * Get a regular expression suitable for highlighting matched words. 
+ * + * This returns a PCRE regular expression which matches any non-negated + * word in the query. + * + * @return string The PCRE regexp. + */ + function getHighlightRegexp() { + if (!isset($this->_hilight_regexp)) { + $words = array_unique($this->_tree->highlight_words()); + if (!$words) { + $this->_hilight_regexp = false; + } + else { + foreach ($words as $key => $word) + $words[$key] = preg_quote($word, '/'); + $this->_hilight_regexp = '(?:' . join('|', $words) . ')'; + } + } + return $this->_hilight_regexp; + } + + /** + * Make an SQL clause which matches the query. + * + * @param $make_sql_clause_func string,function or array + * A callback which takes a single word as an argument and + * returns an SQL clause which will match exactly those records + * containing the word. The word passed to the callback will always + * be in all lower case. + * + * If $make_sql_clause_func is an array, it is interpreted as a method + * callback. The first element of the array is the object, the second + * element (a string) is the name of the method. + * + * If $make_sql_clause_func is a string, it is taken to be the name + * of a global function to call. + * + * Otherwise, $make_sql_clause_func is assumed to be a function object + * (created by create_function()). + * + * Example usage: + *
    +     *     function sql_title_match($word) {
    +     *         return sprintf("LOWER(title) like '%s'",
    +     *                        addslashes($word));
    +     *     }
    +     *
    +     *     ...
    +     *
    +     *     $query = new TextSearchQuery("wiki -page");
    +     *     $sql_clause = $query->makeSqlClause('sql_title_match');
    +     * 
    + * This will result in $sql_clause containing something like + * "(LOWER(title) like 'wiki') AND NOT (LOWER(title) like 'page')". + * + * @return string The PCRE regexp. + */ + function makeSqlClause($make_sql_clause_func) { + $this->_sql_clause_func = $make_sql_clause_func; + return $this->_sql_clause($this->_tree); + } + + function _sql_clause($node) { + switch ($node->op) { + case 'WORD': + $callback = $this->_sql_clause_func; + if (is_array($callback)) { + list($object, $method) = $callback; + return call_user_method($method, $object, $node->word); + } + elseif (is_string($callback)) + return call_user_func($callback, $node->word); + else + return $callback($node->word); + case 'NOT': + return "NOT (" . $this->_sql_clause($node->leaves[0]) . ")"; + case 'AND': + case 'OR': + $subclauses = array(); + foreach ($node->leaves as $leaf) + $subclauses[] = "(" . $this->_sql_clause($leaf) . ")"; + return join(" $node->op ", $subclauses); + default: + assert($node->op == 'VOID'); + return '1=1'; + } + } + + /** + * Get printable representation of the parse tree. + * + * This is for debugging only. + * @return string Printable parse tree. + */ + function asString() { + return $this->_as_string($this->_tree); + } + + function _as_string($node, $indent = '') { + switch ($node->op) { + case 'WORD': + return $indent . "WORD: $node->word"; + case 'VOID': + return $indent . "VOID"; + default: + $lines = array($indent . $node->op . ":"); + $indent .= " "; + foreach ($node->leaves as $leaf) + $lines[] = $this->_as_string($leaf, $indent); + return join("\n", $lines); + } + } +} + + +//////////////////////////////////////////////////////////////// +// +// Remaining classes are private. +// +//////////////////////////////////////////////////////////////// +/** + * Virtual base class for nodes in a TextSearchQuery parse tree. + * + * Also servers as a 'VOID' (contentless) node. + */ +class TextSearchQuery_node +{ + var $op = 'VOID'; + + /** + * Optimize this node. 
+ * @return object Optimized node. + */ + function optimize() { + return $this; + } + + /** + * @return regexp matching this node. + */ + function regexp() { + return ''; + } + + /** + * @param bool True if this node has been negated (higher in the parse tree.) + * @return array A list of all non-negated words contained by this node. + */ + function highlight_words($negated = false) { + return array(); + } +} + +/** + * A word. + */ +class TextSearchQuery_node_word +extends TextSearchQuery_node +{ + var $op = "WORD"; + + function TextSearchQuery_node_word($word) { + $this->word = $word; + } + + function regexp() { + return '(?=.*' . preg_quote($this->word, '/') . ')'; + } + + function highlight_words($negated = false) { + return $negated ? array() : array($this->word); + } +} + + +/** + * A negated clause. + */ +class TextSearchQuery_node_not +extends TextSearchQuery_node +{ + var $op = "NOT"; + + function TextSearchQuery_node_not($leaf) { + $this->leaves = array($leaf); + } + + function optimize() { + $leaf = &$this->leaves[0]; + $leaf = $leaf->optimize(); + if ($leaf->op == 'NOT') + return $leaf->leaves[0]; // ( NOT ( NOT x ) ) -> x + return $this; + } + + function regexp() { + $leaf = &$this->leaves[0]; + return '(?!' . $leaf->regexp() . ')'; + } + + function highlight_words($negated = false) { + return $this->leaves[0]->highlight_words(!$negated); + } +} + +/** + * Virtual base class for 'AND' and 'OR conjoins. + */ +class TextSearchQuery_node_binop +extends TextSearchQuery_node +{ + function TextSearchQuery_node_binop($leaves) { + $this->leaves = $leaves; + } + + function _flatten() { + // This flattens e.g. 
(AND (AND a b) (OR c d) e) + // to (AND a b e (OR c d)) + $flat = array(); + foreach ($this->leaves as $leaf) { + $leaf = $leaf->optimize(); + if ($this->op == $leaf->op) + $flat = array_merge($flat, $leaf->leaves); + else + $flat[] = $leaf; + } + $this->leaves = $flat; + } + + function optimize() { + $this->_flatten(); + assert(!empty($this->leaves)); + if (count($this->leaves) == 1) + return $this->leaves[0]; // (AND x) -> x + return $this; + } + + function highlight_words($negated = false) { + $words = array(); + foreach ($this->leaves as $leaf) + array_splice($words,0,0, + $leaf->highlight_words($negated)); + return $words; + } +} + +/** + * A (possibly multi-argument) 'AND' conjoin. + */ +class TextSearchQuery_node_and +extends TextSearchQuery_node_binop +{ + var $op = "AND"; + + function optimize() { + $this->_flatten(); + + // Convert (AND (NOT a) (NOT b) c d) into (AND (NOT (OR a b)) c d). + // Since OR's are more efficient for regexp matching: + // (?!.*a)(?!.*b) vs (?!.*(?:a|b)) + + // Suck out the negated leaves. + $nots = array(); + foreach ($this->leaves as $key => $leaf) { + if ($leaf->op == 'NOT') { + $nots[] = $leaf->leaves[0]; + unset($this->leaves[$key]); + } + } + + // Combine the negated leaves into a single negated or. + if ($nots) { + $node = ( new TextSearchQuery_node_not + (new TextSearchQuery_node_or($nots)) ); + array_unshift($this->leaves, $node->optimize()); + } + + assert(!empty($this->leaves)); + if (count($this->leaves) == 1) + return $this->leaves[0]; // (AND x) -> x + return $this; + } + + function regexp() { + $regexp = ''; + foreach ($this->leaves as $leaf) + $regexp .= $leaf->regexp(); + return $regexp; + } +} + +/** + * A (possibly multi-argument) 'OR' conjoin. + */ +class TextSearchQuery_node_or +extends TextSearchQuery_node_binop +{ + var $op = "OR"; + + function regexp() { + // We will combine any of our direct descendents which are WORDs + // into a single (?=.*(?:word1|word2|...)) regexp. 
+ + $regexps = array(); + $words = array(); + + foreach ($this->leaves as $leaf) { + if ($leaf->op == 'WORD') + $words[] = preg_quote($leaf->word, '/'); + else + $regexps[] = $leaf->regexp(); + } + + if ($words) + array_unshift($regexps, + '(?=.*' . $this->_join($words) . ')'); + + return $this->_join($regexps); + } + + function _join($regexps) { + assert(count($regexps) > 0); + + if (count($regexps) > 1) + return '(?:' . join('|', $regexps) . ')'; + else + return $regexps[0]; + } +} + + +//////////////////////////////////////////////////////////////// +// +// Parser: +// +//////////////////////////////////////////////////////////////// +define ('TSQ_TOK_WORD', 1); +define ('TSQ_TOK_BINOP', 2); +define ('TSQ_TOK_NOT', 4); +define ('TSQ_TOK_LPAREN', 8); +define ('TSQ_TOK_RPAREN', 16); + +class TextSearchQuery_Parser +{ + /* + * This is a simple recursive descent parser, based on the following grammar: + * + * toplist : + * | toplist expr + * ; + * + * + * list : expr + * | list expr + * ; + * + * expr : atom + * | expr BINOP atom + * ; + * + * atom : '(' list ')' + * | NOT atom + * | WORD + * ; + * + * The terminal tokens are: + * + * + * and|or BINOP + * -|not NOT + * ( LPAREN + * ) RPAREN + * [^-()\s][^()\s]* WORD + * "[^"]*" WORD + * '[^']*' WORD + */ + + function parse ($search_expr) { + $this->lexer = new TextSearchQuery_Lexer($search_expr); + $tree = $this->get_list('toplevel'); + assert($this->lexer->eof()); + unset($this->lexer); + return $tree; + } + + function get_list ($is_toplevel = false) { + $list = array(); + + // token types we'll accept as words (and thus expr's) for the + // purpose of error recovery: + $accept_as_words = TSQ_TOK_NOT | TSQ_TOK_BINOP; + if ($is_toplevel) + $accept_as_words |= TSQ_TOK_LPAREN | TSQ_TOK_RPAREN; + + while ( ($expr = $this->get_expr()) + || ($expr = $this->get_word($accept_as_words)) ) { + + $list[] = $expr; + } + + if (!$list) { + if ($is_toplevel) + return new TextSearchQuery_node; + else + return false; + } + return 
new TextSearchQuery_node_and($list); + } + + function get_expr () { + if ( !($expr = $this->get_atom()) ) + return false; + + $savedpos = $this->lexer->tell(); + while ( ($op = $this->lexer->get(TSQ_TOK_BINOP)) ) { + if ( ! ($right = $this->get_atom()) ) { + break; + } + + if ($op == 'and') + $expr = new TextSearchQuery_node_and(array($expr, $right)); + else { + assert($op == 'or'); + $expr = new TextSearchQuery_node_or(array($expr, $right)); + } + + $savedpos = $this->lexer->tell(); + } + $this->lexer->seek($savedpos); + + return $expr; + } + + + function get_atom() { + if ($word = $this->get_word()) + return $word; + + $savedpos = $this->lexer->tell(); + if ( $this->lexer->get(TSQ_TOK_LPAREN) ) { + if ( ($list = $this->get_list()) && $this->lexer->get(TSQ_TOK_RPAREN) ) + return $list; + } + elseif ( $this->lexer->get(TSQ_TOK_NOT) ) { + if ( ($atom = $this->get_atom()) ) + return new TextSearchQuery_node_not($atom); + } + $this->lexer->seek($savedpos); + return false; + } + + function get_word($accept = TSQ_TOK_WORD) { + if ( ($word = $this->lexer->get($accept)) ) + return new TextSearchQuery_node_word($word); + return false; + } +} + +class TextSearchQuery_Lexer { + function TextSearchQuery_Lexer ($query_str) { + $this->tokens = $this->tokenize($query_str); + $this->pos = 0; + } + + function tell() { + return $this->pos; + } + + function seek($pos) { + $this->pos = $pos; + } + + function eof() { + return $this->pos == count($this->tokens); + } + + function tokenize($string) { + $tokens = array(); + $buf = strtolower(ltrim($string)); + while (!empty($buf)) { + if (preg_match('/^(and|or)\s*/', $buf, $m)) { + $val = $m[1]; + $type = TSQ_TOK_BINOP; + } + elseif (preg_match('/^(-|not)\s*/', $buf, $m)) { + $val = $m[1]; + $type = TSQ_TOK_NOT; + } + elseif (preg_match('/^([()])\s*/', $buf, $m)) { + $val = $m[1]; + $type = $m[1] == '(' ? 
TSQ_TOK_LPAREN : TSQ_TOK_RPAREN; + } + elseif (preg_match('/^ " ( (?: [^"]+ | "" )* ) " \s*/x', $buf, $m)) { + $val = str_replace('""', '"', $m[1]); + $type = TSQ_TOK_WORD; + } + elseif (preg_match("/^ ' ( (?:[^']+|'')* ) ' \s*/x", $buf, $m)) { + $val = str_replace("''", "'", $m[1]); + $type = TSQ_TOK_WORD; + } + elseif (preg_match('/^([^-()][^()\s]*)\s*/', $buf, $m)) { + $val = $m[1]; + $type = TSQ_TOK_WORD; + } + else { + assert(empty($buf)); + break; + } + $buf = substr($buf, strlen($m[0])); + $tokens[] = array($type, $val); + } + return $tokens; + } + + function get($accept) { + if ($this->pos >= count($this->tokens)) + return false; + + list ($type, $val) = $this->tokens[$this->pos]; + if (($type & $accept) == 0) + return false; + + $this->pos++; + return $val; + } +} + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB.php b/lib/WikiDB.php new file mode 100644 index 000000000..b002b9247 --- /dev/null +++ b/lib/WikiDB.php @@ -0,0 +1,1151 @@ +WikiDB is a container for WikiDB_Pages + * which in turn contain WikiDB_PageRevisions. + * + * Conceptually a WikiDB contains all possible WikiDB_Pages, + * whether they have been initialized or not. Since all possible pages are already + * contained in a WikiDB, a call to WikiDB::getPage() will never fail + * (barring bugs and e.g. filesystem or SQL database problems.) + * + * Also each WikiDB_Page always contains at least one WikiDB_PageRevision: + * the default content (e.g. "Describe [PageName] here."). This default content + * has a version number of zero. + * + * WikiDB_PageRevisions have read-only semantics. One can only create new + * revisions or delete old ones --- one can not modify an existing revision. + */ +class WikiDB { + /** + * Open a WikiDB database. + * + * This is a static member function. 
This function inspects its + * arguments to determine the proper subclass of WikiDB to instantiate, + * and then it instantiates it. + * + * @access public + * + * @param $dbparams hash Database configuration parameters. + * Some pertinent paramters are: + *
    + *
    dbtype + *
    The back-end type. Current supported types are: + *
    + *
    SQL + *
    Generic SQL backend based on the PEAR/DB database abstraction + * library. + *
    dba + *
    Dba based backend. + *
    + * + *
    dsn + *
    (Used by the SQL backend.) + * The DSN specifying which database to connect to. + * + *
    prefix + *
    Prefix to be prepended to database table (and file names). + * + *
    directory + *
    (Used by the dba backend.) + * Which directory db files reside in. + * + *
    timeout + *
    (Used by the dba backend.) + * Timeout in seconds for opening (and obtaining lock) on the db files. + * + *
    dba_handler + *
    (Used by the dba backend.) + * Which dba handler to use. Good choices are probably either 'gdbm' + * or 'db2'. + *
    + * + * @return object A WikiDB object. + **/ + function open ($dbparams) { + $dbtype = $dbparams{'dbtype'}; + include_once("lib/WikiDB/$dbtype.php"); + $class = 'WikiDB_' . $dbtype; + return new $class ($dbparams); + } + + + /** + * Constructor + * @access protected + */ + function WikiDB ($backend, $dbparams) { + $this->_backend = &$backend; + $this->_cache = new WikiDB_cache($backend); + + //FIXME: devel checking. + //$this->_backend->check(); + } + + /** + * Get any user-level warnings about this WikiDB. + * + * Some back-ends, e.g. by default create there data files + * in the global /tmp directory. We would like to warn the user + * when this happens (since /tmp files tend to get wiped + * periodically.) Warnings such as these may be communicated + * from specific back-ends through this method. + * + * @access public + * + * @return string A warning message (or false if there is none.) + */ + function genericWarnings() { + return false; + } + + /** + * Close database connection. + * + * The database may no longer be used after it is closed. + * + * Closing a WikiDB invalidates all WikiDB_Pages, + * WikiDB_PageRevisions and WikiDB_PageIterators which + * have been obtained from it. + * + * @access public + */ + function close () { + $this->_backend->close(); + $this->_cache->close(); + } + + /** + * Get a WikiDB_Page from a WikiDB. + * + * A WikiDB consists of the (infinite) set of all possible pages, + * therefore this method never fails. + * + * @access public + * @param $pagename string Which page to get. + * @return object The requested WikiDB_Page. + */ + function getPage($pagename) { + assert(is_string($pagename) && $pagename); + return new WikiDB_Page($this, $pagename); + } + + + // Do we need this? + //function nPages() { + //} + + + /** + * Determine whether page exists (in non-default form). + * + *
    +     *   $is_page = $dbi->isWikiPage($pagename);
    +     * 
    + * is equivalent to + *
    +     *   $page = $dbi->getPage($pagename);
    +     *   $current = $page->getCurrentRevision();
    +     *   $is_page = ! $current->hasDefaultContents();
    +     * 
    + * however isWikiPage may be implemented in a more efficient + * manner in certain back-ends. + * + * @access public + * + * @param $pagename string Which page to check. + * + * @return boolean True if the page actually exists with non-default contents + * in the WikiDataBase. + */ + function isWikiPage ($pagename) { + $page = $this->getPage($pagename); + $current = $page->getCurrentRevision(); + return ! $current->hasDefaultContents(); + } + + /** + * Delete page from the WikiDB. + * + * Deletes all revisions of the page from the WikiDB. + * Also resets all page meta-data to the default values. + * + * @access public + * + * @param $pagename string Name of page to delete. + */ + function deletePage($pagename) { + $this->_cache->delete_page($pagename); + $this->_backend->set_links($pagename, false); + } + + /** + * Retrieve all pages. + * + * Gets the set of all pages with non-default contents. + * + * FIXME: do we need this? I think so. The simple searches + * need this stuff. + * + * @access public + * + * @param $include_defaulted boolean Normally pages whose most recent + * revision has empty content are considered to be non-existant. + * Unless $include_defaulted is set to true, those pages will + * not be returned. + * + * @return object A WikiDB_PageIterator which contains all pages + * in the WikiDB which have non-default contents. + */ + function getAllPages($include_defaulted = false) { + $result = $this->_backend->get_all_pages($include_defaulted); + return new WikiDB_PageIterator($this, $result); + } + + /** + * Title search. + * + * Search for pages containing (or not containing) certain words in their + * names. + * + * Pages are returned in alphabetical order whenever it is practical + * to do so. + * + * FIXME: should titleSearch and fullSearch be combined? I think so. + * + * @access public + * @param $search object A TextSearchQuery + * @return object A WikiDB_PageIterator containing the matching pages. 
+ * @see TextSearchQuery + */ + function titleSearch($search) { + $result = $this->_backend->text_search($search); + return new WikiDB_PageIterator($this, $result); + } + + /** + * Full text search. + * + * Search for pages containing (or not containing) certain words in their + * entire text (this includes the page content and the page name). + * + * Pages are returned in alphabetical order whenever it is practical + * to do so. + * + * @access public + * + * @param $search object A TextSearchQuery object. + * @return object A WikiDB_PageIterator containing the matching pages. + * @see TextSearchQuery + */ + function fullSearch($search) { + $result = $this->_backend->text_search($search, 'full_text'); + return new WikiDB_PageIterator($this, $result); + } + + /** + * Find the pages with the greatest hit counts. + * + * Pages are returned in reverse order by hit count. + * + * @access public + * + * @param $limit unsigned The maximum number of pages to return. + * Set $limit to zero to return all pages. + * + * @return object A WikiDB_PageIterator containing the matching pages. + */ + function mostPopular($limit = 20) { + $result = $this->_backend->most_popular($limit); + return new WikiDB_PageIterator($this, $result); + } + + /** + * Find recent page revisions. + * + * Revisions are returned in reverse order by creation time. + * + * @access public + * + * @param $params hash This hash is used to specify various optional + * parameters: + *
    + *
    limit + *
    (integer) At most this many revisions will be returned. + *
    since + *
    (integer) Only revisions since this time (unix-timestamp) will be returned. + *
    include_minor_revisions + *
    (boolean) Also include minor revisions. (Default is not to.) + *
    exclude_major_revisions + *
    (boolean) Don't include non-minor revisions. + * (Exclude_major_revisions implies include_minor_revisions.) + *
    include_all_revisions + *
    (boolean) Return all matching revisions for each page. + * Normally only the most recent matching revision is returned + * for each page. + *
    + * + * @return object A WikiDB_PageRevisionIterator containing the matching revisions. + */ + function mostRecent($params = false) { + $result = $this->_backend->most_recent($params); + return new WikiDB_PageRevisionIterator($this, $result); + } +}; + + +/** + * An abstract base class which representing a wiki-page within a WikiDB. + * + * A WikiDB_Page contains a number (at least one) of WikiDB_PageRevisions. + */ +class WikiDB_Page +{ + function WikiDB_Page(&$wikidb, $pagename) { + $this->_wikidb = &$wikidb; + $this->_pagename = $pagename; + assert(!empty($this->_pagename)); + } + + /** + * Get the name of the wiki page. + * + * @access public + * + * @return string The page name. + */ + function getName() { + return $this->_pagename; + } + + + /** + * Delete an old revision of a WikiDB_Page. + * + * Deletes the specified revision of the page. + * It is a fatal error to attempt to delete the current revision. + * + * @access public + * + * @param $version integer Which revision to delete. (You can also + * use a WikiDB_PageRevision object here.) + */ + function deleteRevision($version) { + $backend = &$this->_wikidb->_backend; + $cache = &$this->_wikidb->_cache; + $pagename = &$this->_pagename; + + $version = $this->_coerce_to_version($version); + if ($version == 0) + return; + + $backend->lock(); + $latestversion = $backend->get_latest_version($pagename); + if ($latestversion && $version == $latestversion) { + $backend->unlock(); + trigger_error("Attempt to delete most recent revision of '$pagename'", + E_USER_ERROR); + return; + } + + $cache->delete_versiondata($pagename, $version); + $backend->unlock(); + } + + /* + * Delete a revision, or possibly merge it with a previous + * revision. + * + * The idea is this: + * Suppose an author make a (major) edit to a page. Shortly + * after that the same author makes a minor edit (e.g. to fix + * spelling mistakes he just made.) 
+ * + * Now some time later, where cleaning out old saved revisions, + * and would like to delete his minor revision (since there's really + * no point in keeping minor revisions around for a long time.) + * + * Note that the text after the minor revision probably represents + * what the author intended to write better than the text after the + * preceding major edit. + * + * So what we really want to do is merge the minor edit with the + * preceding edit. + * + * We will only do this when: + *
      + *
    • The revision being deleted is a minor one, and + *
    • It has the same author as the immediately preceding revision. + *
    + */ + function mergeRevision($version) { + $backend = &$this->_wikidb->_backend; + $cache = &$this->_wikidb->_cache; + $pagename = &$this->_pagename; + + $version = $this->_coerce_to_version($version); + if ($version == 0) + return; + + $backend->lock(); + $latestversion = $backend->get_latest_version($pagename); + if ($latestversion && $version == $latestversion) { + $backend->unlock(); + trigger_error("Attempt to merge most recent revision of '$pagename'", + E_USER_ERROR); + return; + } + + $versiondata = $cache->get_versiondata($pagename, $version, true); + if (!$versiondata) { + // Not there? ... we're done! + $backend->unlock(); + return; + } + + if ($versiondata['is_minor_edit']) { + $previous = $backend->get_previous_version($pagename, $version); + if ($previous) { + $prevdata = $cache->get_versiondata($pagename, $previous); + if ($prevdata['author_id'] == $versiondata['author_id']) { + // This is a minor revision, previous version is by the + // same author. We will merge the revisions. + $cache->update_versiondata($pagename, $previous, + array('%content' => $versiondata['%content'], + '_supplanted' => $versiondata['_supplanted'])); + } + } + } + + $cache->delete_versiondata($pagename, $version); + $backend->unlock(); + } + + + /** + * Create a new revision of a WikiDB_Page. + * + * @access public + * + * @param $content string Contents of new revision. + * + * @param $metadata hash Metadata for new revision. + * All values in the hash should be scalars (strings or integers). + * + * + * @param $version int Version number for new revision. + * To ensure proper serialization of edits, $version must be + * exactly one higher than the current latest version. + * (You can defeat this check by setting $version to + * WIKIDB_FORCE_CREATE --- not usually recommended.) + * + * @param $links array List of pagenames which this page links to. + * + * @return object Returns the new WikiDB_PageRevision object. 
If $version was incorrect, + * returns false + */ + function createRevision($version, &$content, $metadata, $links) { + $backend = &$this->_wikidb->_backend; + $cache = &$this->_wikidb->_cache; + $pagename = &$this->_pagename; + + $backend->lock(); + + $latestversion = $backend->get_latest_version($pagename); + $newversion = $latestversion + 1; + assert($newversion >= 1); + + if ($version != WIKIDB_FORCE_CREATE && $version != $newversion) { + $backend->unlock(); + return false; + } + + $data = $metadata; + + foreach ($data as $key => $val) { + if (empty($val) || $key[0] == '_' || $key[0] == '%') + unset($data[$key]); + } + + assert(!empty($data['author_id'])); + if (empty($data['author_id'])) + @$data['author_id'] = $data['author']; + + if (empty($data['mtime'])) + $data['mtime'] = time(); + + if ($latestversion) { + // Ensure mtimes are monotonic. + $pdata = $cache->get_versiondata($pagename, $latestversion); + if ($data['mtime'] < $pdata['mtime']) { + trigger_error("$pagename: Date of new revision non-monotonic", + E_USER_NOTICE); + $data['orig_mtime'] = $data['mtime']; + $data['mtime'] = $pdata['mtime']; + } + + // FIXME: use (possibly user specified) 'mtime' time or time()? + $cache->update_versiondata($pagename, $latestversion, + array('_supplanted' => $data['mtime'])); + } + + $data['%content'] = &$content; + + $cache->set_versiondata($pagename, $newversion, $data); + + //$cache->update_pagedata($pagename, array(':latestversion' => $newversion, + //':deleted' => empty($content))); + + $backend->set_links($pagename, $links); + + $backend->unlock(); + + // FIXME: probably should have some global state information in the backend + // to control when to optimize. + if (time() % 50 == 0) { + trigger_error('"Optimizing" backend', E_USER_NOTICE); + $backend->optimize(); + } + + return new WikiDB_PageRevision($this->_wikidb, $pagename, $newversion, $data); + } + + /** + * Get the most recent revision of a page. 
+ * + * @access public + * + * @return object The current WikiDB_PageRevision object. + */ + function getCurrentRevision() { + $backend = &$this->_wikidb->_backend; + $cache = &$this->_wikidb->_cache; + $pagename = &$this->_pagename; + + $backend->lock(); + $version = $backend->get_latest_version($pagename); + $revision = $this->getRevision($version); + $backend->unlock(); + assert($revision); + return $revision; + } + + /** + * Get a specific revision of a WikiDB_Page. + * + * @access public + * + * @param $version integer Which revision to get. + * + * @return object The requested WikiDB_PageRevision object, or false if the + * requested revision does not exist in the WikiDB. Note that + * version zero of any page always exists. + */ + function getRevision($version) { + $cache = &$this->_wikidb->_cache; + $pagename = &$this->_pagename; + + if ($version == 0) + return new WikiDB_PageRevision($this->_wikidb, $pagename, 0); + + assert($version > 0); + $vdata = $cache->get_versiondata($pagename, $version); + if (!$vdata) + return false; + return new WikiDB_PageRevision($this->_wikidb, $pagename, $version, $vdata); + } + + /** + * Get previous page revision. + * + * This method find the most recent revision before a specified version. + * + * @access public + * + * @param $version integer Find most recent revision before this version. + * You can also use a WikiDB_PageRevision object to specify the $version. + * + * @return object The requested WikiDB_PageRevision object, or false if the + * requested revision does not exist in the WikiDB. Note that + * unless $version is greater than zero, a revision (perhaps version zero, + * the default revision) will always be found. 
+ */ + function getRevisionBefore($version) { + $backend = &$this->_wikidb->_backend; + $pagename = &$this->_pagename; + + $version = $this->_coerce_to_version($version); + + if ($version == 0) + return false; + $backend->lock(); + $previous = $backend->get_previous_version($pagename, $version); + $revision = $this->getRevision($previous); + $backend->unlock(); + assert($revision); + return $revision; + } + + /** + * Get all revisions of the WikiDB_Page. + * + * This does not include the version zero (default) revision in the + * returned revision set. + * + * @return object a WikiDB_PageRevisionIterator containing all revisions of + * this WikiDB_Page in reverse order by version number. + */ + function getAllRevisions() { + $backend = &$this->_wikidb->_backend; + $revs = $backend->get_all_revisions($this->_pagename); + return new WikiDB_PageRevisionIterator($this->_wikidb, $revs); + } + + /** + * Find pages which link to or are linked from a page. + * + * @access public + * + * @param $reversed enum Which links to find: true for backlinks (default). + * + * @return object A WikiDB_PageIterator containing all matching pages. + */ + function getLinks($reversed = true) { + $backend = &$this->_wikidb->_backend; + $result = $backend->get_links($this->_pagename, $reversed); + return new WikiDB_PageIterator($this->_wikidb, $result); + } + + /** + * Access WikiDB_Page meta-data. + * + * @access public + * + * @param $key string Which meta data to get. + * Some reserved meta-data keys are: + *
    + *
    'locked'
    Is page locked? + *
    'hits'
    Page hit counter. + *
    'score
    Page score (not yet implement, do we need?) + *
    + * + * @return scalar The requested value, or false if the requested data + * is not set. + */ + function get($key) { + $cache = &$this->_wikidb->_cache; + if (!$key || $key[0] == '%') + return false; + $data = $cache->get_pagedata($this->_pagename); + return isset($data[$key]) ? $data[$key] : false; + } + + /** + * Get all the page meta-data as a hash. + * + * @return hash The page meta-data. + */ + function getMetaData() { + $cache = &$this->_wikidb->_cache; + $data = $cache->get_pagedata($this->_pagename); + $meta = array(); + foreach ($data as $key => $val) { + if (!empty($val) && $key[0] != '%') + $meta[$key] = $val; + } + return $meta; + } + + /** + * Set page meta-data. + * + * @see get + * @access public + * + * @param $key string Meta-data key to set. + * @param $newval string New value. + */ + function set($key, $newval) { + $cache = &$this->_wikidb->_cache; + $pagename = &$this->_pagename; + + assert($key && $key[0] != '%'); + + $data = $cache->get_pagedata($pagename); + + if (!empty($newval)) { + if (!empty($data[$key]) && $data[$key] == $newval) + return; // values identical, skip update. + $data[$key] = $newval; + } + else { + if (empty($data[$key])) + return; // values identical, skip update. + unset($data[$key]); + } + + $cache->update_pagedata($pagename, $data, array($key => $newval)); + } + + /** + * Increase page hit count. + * + * FIXME: IS this needed? Probably not. + * + * This is a convenience function. + *
     $page->increaseHitCount(); 
    + * is functionally identical to + *
     $page->set('hits',$page->get('hits')+1); 
    + * + * Note that this method may be implemented in more efficient ways + * in certain backends. + * + * @access public + */ + function increaseHitCount() { + @$newhits = $this->get('hits') + 1; + $this->set('hits', $newhits); + } + + /** + * Return a string representation of the WikiDB_Page + * + * This is really only for debugging. + * + * @access public + * + * @return string Printable representation of the WikiDB_Page. + */ + function asString () { + ob_start(); + printf("[%s:%s\n", get_class($this), $this->getName()); + print_r($this->getMetaData()); + echo "]\n"; + $strval = ob_get_contents(); + ob_end_clean(); + return $strval; + } + + + /** + * @access private + * @param $version_or_pagerevision int or object + * Takes either the version number (and int) or a WikiDB_PageRevision + * object. + * @return int The version number. + */ + function _coerce_to_version($version_or_pagerevision) { + if (method_exists($version_or_pagerevision, "getContent")) + $version = $version_or_pagerevision->getVersion(); + else + $version = (int) $version_or_pagerevision; + + assert($version >= 0); + return $version; + } +}; + +/** + * This class represents a specific revision of a WikiDB_Page within + * a WikiDB. + * + * A WikiDB_PageRevision has read-only semantics. You may only + * create new revisions (and delete old ones) --- you cannot + * modify existing revisions. + */ +class WikiDB_PageRevision +{ + function WikiDB_PageRevision(&$wikidb, $pagename, $version, $versiondata = false) { + $this->_wikidb = &$wikidb; + $this->_pagename = $pagename; + $this->_version = $version; + $this->_data = $versiondata ? $versiondata : array(); + } + + /** + * Get the WikiDB_Page which this revision belongs to. + * + * @access public + * + * @return object The WikiDB_Page which this revision belongs to. + */ + function getPage() { + return new WikiDB_Page($this->_wikidb, $this->_pagename); + } + + /** + * Get the version number of this revision. 
+ * + * @access public + * + * @return int The version number of this revision. + */ + function getVersion() { + return $this->_version; + } + + /** + * Determine whether this revision has defaulted content. + * + * The default revision (version 0) of each page, as well as + * any pages which are created with empty content + * have their content defaulted to something like: + *
    +     *   Describe [ThisPage] here.
    +     * 
    + * + * @access public + * + * @return boolean Returns true if the page has default content. + */ + function hasDefaultContents() { + $data = &$this->_data; + return empty($data['%content']); + } + + /** + * Get the content as an array of lines. + * + * @access public + * + * @return array An array of lines. + * The lines should contain no trailing white space. + */ + function getContent() { + return explode("\n", $this->getPackedContent()); + } + + /** + * Get the content as a string. + * + * @access public + * + * @return string The page content. + * Lines are separated by new-lines. + */ + function getPackedContent() { + $data = &$this->_data; + + + if (empty($data['%content'])) { + // Replace empty content with default value. + return sprintf(gettext("Describe [%s] here."), + $this->_pagename); + } + + // There is (non-default) content. + assert($this->_version > 0); + + if (!is_string($data['%content'])) { + // Content was not provided to us at init time. + // (This is allowed because for some backends, fetching + // the content may be expensive, and often is not wanted + // by the user.) + // + // In any case, now we need to get it. + $data['%content'] = $this->_get_content(); + assert(is_string($data['%content'])); + } + + return $data['%content']; + } + + function _get_content() { + $cache = &$this->_wikidb->_cache; + $pagename = $this->_pagename; + $version = $this->_version; + + assert($version > 0); + + $newdata = $cache->get_versiondata($pagename, $version, true); + if ($newdata) { + assert(is_string($newdata['%content'])); + return $newdata['%content']; + } + else { + // else revision has been deleted... What to do? + return "Acck! Revision $version of $pagename seems to have been deleted!"; + } + } + + /** + * Get meta-data for this revision. + * + * + * @access public + * + * @param $key string Which meta-data to access. + * + * Some reserved revision meta-data keys are: + *
    + *
    'mtime'
    Time this revision was created (seconds since midnight Jan 1, 1970.) + * The 'mtime' meta-value is normally set automatically by the database + * backend, but it may be specified explicitly when creating a new revision. + *
    orig_mtime + *
    To ensure consistency of RecentChanges, the mtimes of the versions + * of a page must be monotonically increasing. If an attempt is + * made to create a new revision with an mtime less than that of + * the preceding revision, the new revision's timestamp is forced + * to be equal to that of the preceding revision. In that case, + * the originally requested mtime is preserved in 'orig_mtime'. + *
    '_supplanted'
    Time this revision ceased to be the most recent. + * This meta-value is always automatically maintained by the database + * backend. (It is set from the 'mtime' meta-value of the superseding + * revision.) '_supplanted' has a value of 'false' for the current revision. + * + * FIXME: this could be refactored: + *
    author + *
    Author of the page (as he should be reported in, e.g. RecentChanges.) + *
    author_id + *
    Authenticated author of a page. This is used to identify + * the distinctness of authors when cleaning old revisions from + * the database. + *
    'is_minor_edit'
    Set if change was marked as a minor revision by the author. + *
    'summary'
    Short change summary entered by page author. + *
    + * + * Meta-data keys must be valid C identifers (they have to start with a letter + * or underscore, and can contain only alphanumerics and underscores.) + * + * @return string The requested value, or false if the requested value + * is not defined. + */ + function get($key) { + if (!$key || $key[0] == '%') + return false; + $data = &$this->_data; + return isset($data[$key]) ? $data[$key] : false; + } + + /** + * Get all the revision page meta-data as a hash. + * + * @return hash The revision meta-data. + */ + function getMetaData() { + $meta = array(); + foreach ($this->_data as $key => $val) { + if (!empty($val) && $key[0] != '%') + $meta[$key] = $val; + } + return $meta; + } + + + /** + * Return a string representation of the revision. + * + * This is really only for debugging. + * + * @access public + * + * @return string Printable representation of the WikiDB_Page. + */ + function asString () { + ob_start(); + printf("[%s:%d\n", get_class($this), $this->get('version')); + print_r($this->_data); + echo $this->getPackedContent() . "\n]\n"; + $strval = ob_get_contents(); + ob_end_clean(); + return $strval; + } +}; + + +/** + * A class which represents a sequence of WikiDB_Pages. + */ +class WikiDB_PageIterator +{ + function WikiDB_PageIterator(&$wikidb, &$pages) { + $this->_pages = $pages; + $this->_wikidb = &$wikidb; + } + + /** + * Get next WikiDB_Page in sequence. + * + * @access public + * + * @return object The next WikiDB_Page in the sequence. + */ + function next () { + if ( ! ($next = $this->_pages->next()) ) + return false; + + $pagename = &$next['pagename']; + if (isset($next['pagedata'])) + $this->_wikidb->_cache->cache_data($next); + + return new WikiDB_Page($this->_wikidb, $pagename); + } + + /** + * Release resources held by this iterator. + * + * The iterator may not be used after free() is called. + * + * There is no need to call free(), if next() has returned false. + * (I.e. 
if you iterate through all the pages in the sequence, + * you do not need to call free() --- you only need to call it + * if you stop before the end of the iterator is reached.) + * + * @access public + */ + function free() { + $this->_pages->free(); + } +}; + +/** + * A class which represents a sequence of WikiDB_PageRevisions. + */ +class WikiDB_PageRevisionIterator +{ + function WikiDB_PageRevisionIterator(&$wikidb, &$revisions) { + $this->_revisions = $revisions; + $this->_wikidb = &$wikidb; + } + + /** + * Get next WikiDB_PageRevision in sequence. + * + * @access public + * + * @return object The next WikiDB_PageRevision in the sequence. + */ + function next () { + if ( ! ($next = $this->_revisions->next()) ) + return false; + + $this->_wikidb->_cache->cache_data($next); + + $pagename = $next['pagename']; + $version = $next['version']; + $versiondata = $next['versiondata']; + assert(!empty($pagename)); + assert(is_array($versiondata)); + assert($version > 0); + + return new WikiDB_PageRevision($this->_wikidb, $pagename, $version, $versiondata); + } + + /** + * Release resources held by this iterator. + * + * The iterator may not be used after free() is called. + * + * There is no need to call free(), if next() has returned false. + * (I.e. if you iterate through all the revisions in the sequence, + * you do not need to call free() --- you only need to call it + * if you stop before the end of the iterator is reached.) + * + * @access public + */ + function free() { + $this->_revisions->free(); + } +}; + + +/** + * Data cache used by WikiDB. + * + * FIXME: Maybe rename this to caching_backend (or some such). + * + * @access protected + */ +class WikiDB_cache +{ + // FIXME: cache (limited) version data, too. 
+ + function WikiDB_cache (&$backend) { + $this->_backend = &$backend; + + $this->_pagedata_cache = array(); + } + + function close() { + $this->_pagedata_cache = false; + } + + function get_pagedata($pagename) { + assert(is_string($pagename) && $pagename); + $cache = &$this->_pagedata_cache; + + if (!isset($cache[$pagename]) || !is_array($cache[$pagename])) { + $cache[$pagename] = $this->_backend->get_pagedata($pagename); + if (empty($cache[$pagename])) + $cache[$pagename] = array(); + } + + return $cache[$pagename]; + } + + function update_pagedata($pagename, $newdata) { + assert(is_string($pagename) && $pagename); + + $this->_backend->update_pagedata($pagename, $newdata); + + if (is_array($this->_pagedata_cache[$pagename])) { + $cachedata = &$this->_pagedata_cache[$pagename]; + foreach($newdata as $key => $val) + $cachedata[$key] = $val; + } + } + + function invalidate_cache($pagename) { + $this->_pagedata_cache[$pagename] = false; + } + + function delete_page($pagename) { + $this->_backend->delete_page($pagename); + $this->_pagedata_cache[$pagename] = false; + } + + // FIXME: ugly + function cache_data($data) { + if (isset($data['pagedata'])) + $this->_pagedata_cache[$data['pagename']] = $data['pagedata']; + } + + function get_versiondata($pagename, $version, $need_content = false) { + $vdata = $this->_backend->get_versiondata($pagename, $version, $need_content); + // FIXME: ugly + if ($vdata && !empty($vdata['%pagedata'])) + $this->_pagedata_cache[$pagename] = $vdata['%pagedata']; + return $vdata; + } + + function set_versiondata($pagename, $version, $data) { + $new = $this->_backend-> + set_versiondata($pagename, $version, $data); + } + + function update_versiondata($pagename, $version, $data) { + $new = $this->_backend-> + update_versiondata($pagename, $version, $data); + } + + function delete_versiondata($pagename, $version) { + $new = $this->_backend-> + delete_versiondata($pagename, $version); + } +}; + +// Local Variables: +// mode: php +// tab-width: 8 
+// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/SQL.php b/lib/WikiDB/SQL.php new file mode 100644 index 000000000..ca334e565 --- /dev/null +++ b/lib/WikiDB/SQL.php @@ -0,0 +1,50 @@ +WikiDB($backend, $dbparams); + } + + + /** + * Determine whether page exists (in non-default form). + * @see WikiDB::isWikiPage + */ + function isWikiPage ($pagename) { + /* + if (empty($this->_iwpcache)) + $this->_iwpcache = array_flip($this->_backend->get_all_pagenames()); + return isset($this->_iwpcache[$pagename]); + */ + + if (!isset($this->_iwpcache[$pagename])) + $this->_iwpcache[$pagename] = $this->_backend->is_wiki_page($pagename); + return $this->_iwpcache[$pagename]; + + // Talk to the backend directly for max speed. + /* + $pagedata = $this->_cache->get_pagedata($pagename); + return !empty($pagedata[':non_default']); + */ + } +}; + + +// Local Variables: +// mode: php +// End: +?> diff --git a/lib/WikiDB/backend.php b/lib/WikiDB/backend.php new file mode 100644 index 000000000..a9d3104c5 --- /dev/null +++ b/lib/WikiDB/backend.php @@ -0,0 +1,489 @@ + + *
    locked
    If the page is locked. + *
    hits
    The page hit count. + *
    created
    Unix time of page creation. (FIXME: Deprecated: I + * don't think we need this...) + * + */ + function get_pagedata($pagename) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Update the page meta-data. + * + * Set page meta-data. + * + * Only meta-data whose keys are preset in $newdata is affected. + * + * For example: + *
    +     *   $backend->update_pagedata($pagename, array('locked' => 1)); 
    +     * 
    + * will set the value of 'locked' to 1 for the specified page, but it + * will not affect the value of 'hits' (or whatever other meta-data + * may have been stored for the page.) + * + * To delete a particular piece of meta-data, set it's value to false. + *
    +     *   $backend->update_pagedata($pagename, array('locked' => false)); 
    +     * 
    + * + * @param $pagename string Page name. + * @param $newdata hash New meta-data. + */ + function update_pagedata($pagename, $newdata) { + trigger_error("virtual", E_USER_ERROR); + } + + + /** + * Get the current version number for a page. + * + * @param $pagename string Page name. + * @return int The latest version number for the page. Returns zero if + * no versions of a page exist. + */ + function get_latest_version($pagename) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Get preceding version number. + * + * @param $pagename string Page name. + * @param $version int Find version before this one. + * @return int The version number of the version in the database which + * immediately preceeds $version. + */ + function get_previous_version($pagename, $version) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Get revision meta-data and content. + * + * @param $pagename string Page name. + * @param $version integer Which version to get. + * @param $want_content boolean + * Indicates the caller really wants the page content. If this + * flag is not set, the backend is free to skip fetching of the + * page content (as that may be expensive). If the backend omits + * the content, the backend might still want to set the value of + * '%content' to the empty string if it knows there's no content. + * + * @return hash The version data, or false if specified version does not + * exist. + * + * Some keys which might be present in the $versiondata hash are: + *
    + *
    %content + *
    This is a pseudo-meta-data element (since it's actually + * the page data, get it?) containing the page content. + * If the content was not fetched, this key may not be present. + *
    + * For description of other version meta-data see WikiDB_PageRevision::get(). + * @see WikiDB_PageRevision::get + */ + function get_versiondata($pagename, $version, $want_content = false) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Delete page from the database. + * + * Delete page (and all it's revisions) from the database. + * + * @param $pagename string Page name. + */ + function delete_page($pagename) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Delete an old revision of a page. + * + * Note that one is never allowed to delete the most recent version, + * but that this requirement is enforced by WikiDB not by the backend. + * + * In fact, to be safe, backends should probably allow the deletion of + * the most recent version. + * + * @param $pagename string Page name. + * @param $version integer Version to delete. + */ + function delete_versiondata($pagename, $version) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Create a new page revision. + * + * If the given ($pagename,$version) is already in the database, + * this method completely overwrites any stored data for that version. + * + * @param $pagename string Page name. + * @param $version int New revisions content. + * @param $data hash New revision metadata. + * + * @see get_versiondata + */ + function set_versiondata($pagename, $version, $data) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Update page version meta-data. + * + * If the given ($pagename,$version) is already in the database, + * this method only changes those meta-data values whose keys are + * explicity listed in $newdata. + * + * @param $pagename string Page name. + * @param $version int New revisions content. + * @param $newdata hash New revision metadata. 
+ * @see set_versiondata, get_versiondata + */ + function update_versiondata($pagename, $version, $newdata) { + $data = $this->get_versiondata($pagename, $version, true); + if (!$data) { + assert($data); + return; + } + foreach ($newdata as $key => $val) { + if (empty($val)) + unset($data[$key]); + else + $data[$key] = $val; + } + $this->set_versiondata($pagename, $version, $data); + } + + /** + * Set links for page. + * + * @param $pagename string Page name. + * + * @param $links array List of page(names) which page links to. + */ + function set_links($pagename, $links) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Find pages which link to or are linked from a page. + * + * @param $pagename string Page name. + * @param $reversed boolean True to get backlinks. + * + * FIXME: array or iterator? + * @return object A WikiDB_backend_iterator. + */ + function get_links($pagename, $reversed) { + //FIXME: implement simple (but slow) link finder. + die("FIXME"); + } + + /** + * Get all revisions of a page. + * + * @param $pagename string The page name. + * @return object A WikiDB_backend_iterator. + */ + function get_all_revisions($pagename) { + include_once('lib/WikiDB/backend/dumb/AllRevisionsIter.php'); + return new WikiDB_backend_dumb_AllRevisionsIter($this, $pagename); + } + + /** + * Get all pages in the database. + * + * Pages should be returned in alphabetical order if that is + * feasable. + * + * @access protected + * + * @param $include_defaulted boolean + * If set, even pages with no content will be returned + * --- but still only if they have at least one revision (not + * counting the default revision 0) entered in the database. + * + * Normally pages whose current revision has empty content + * are not returned as these pages are considered to be + * non-existing. + * + * @return object A WikiDB_backend_iterator. 
+ */ + function get_all_pages($include_defaulted) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Title or full text search. + * + * Pages should be returned in alphabetical order if that is + * feasable. + * + * @access protected + * + * @param $search object A TextSearchQuery object describing what pages + * are to be searched for. + * + * @param $fullsearch boolean If true, a full text search is performed, + * otherwise a title search is performed. + * + * @return object A WikiDB_backend_iterator. + * + * @see WikiDB::titleSearch + */ + function text_search($search = '', $fullsearch = false) { + // This is method implements a simple linear search + // through all the pages in the database. + // + // It is expected that most backends will overload + // method with something more efficient. + include_once('lib/WikiDB/backend/dumb/TextSearchIter.php'); + $pages = $this->get_all_pages(false); + return new WikiDB_backend_dumb_TextSearchIter($this, $pages, $search, $fullsearch); + } + + /** + * Find pages with highest hit counts. + * + * Find the pages with the highest hit counts. The pages should + * be returned in reverse order by hit count. + * + * @access protected + * @param $limit integer No more than this many pages + * @return object A WikiDB_backend_iterator. + */ + function most_popular($limit) { + // This is method fetches all pages, then + // sorts them by hit count. + // (Not very efficient.) + // + // It is expected that most backends will overload + // method with something more efficient. + include_once('lib/WikiDB/backend/dumb/MostPopularIter.php'); + $pages = $this->get_all_pages(false); + + return new WikiDB_backend_dumb_MostPopularIter($this, $pages, $limit); + } + + /** + * Find recent changes. + * + * @access protected + * @param $params hash See WikiDB::mostRecent for a description + * of parameters which can be included in this hash. + * @return object A WikiDB_backend_iterator. 
+ * @see WikiDB::mostRecent + */ + function most_recent($params) { + // This method is very inefficient and searches through + // all pages for the most recent changes. + // + // It is expected that most backends will overload + // method with something more efficient. + include_once('lib/WikiDB/backend/dumb/MostRecentIter.php'); + $pages = $this->get_all_pages(true); + return new WikiDB_backend_dumb_MostRecentIter($this, $pages, $params); + } + + /** + * Lock backend database. + * + * Calls may be nested. + * + * @param $write_lock boolean Unless this is set to false, a write lock + * is acquired, otherwise a read lock. If the backend doesn't support + * read locking, then it should make a write lock no matter which type + * of lock was requested. + * + * All backends should support write locking. + */ + function lock($write_lock = true) { + } + + /** + * Unlock backend database. + * + * @param $force boolean Normally, the database is not unlocked until + * unlock() is called as many times as lock() has been. If $force is + * set to true, the the database is unconditionally unlocked. + */ + function unlock($force = false) { + } + + + /** + * Close database. + */ + function close () { + } + + /** + * Synchronize with filesystem. + * + * This should flush all unwritten data to the filesystem. + */ + function sync() { + } + + /** + * Optimize the database. + */ + function optimize() { + } + + /** + * Check database integrity. + * + * This should check the validity of the internal structure of the database. + * Errors should be reported via: + *
    +     *   trigger_error("Message goes here.", E_USER_WARNING);
    +     * 
    + * + * @return boolean True iff database is in a consistent state. + */ + function check() { + } + + /** + * Put the database into a consistent state. + * + * This should put the database into a consistent state. + * (I.e. rebuild indexes, etc...) + * + * @return boolean True iff successful. + */ + function rebuild() { + } + + function _parse_searchwords($search) { + $search = strtolower(trim($search)); + if (!$search) + return array(array(),array()); + + $words = preg_split('/\s+/', $search); + $exclude = array(); + foreach ($words as $key => $word) { + if ($word[0] == '-' && $word != '-') { + $word = substr($word, 1); + $exclude[] = preg_quote($word); + unset($words[$key]); + } + } + return array($words, $exclude); + } + +}; + +/** + * Iterator returned by backend methods which (possibly) return + * multiple records. + * + * FIXME: this should be two seperate classes: page_iter and version_iter. + */ +class WikiDB_backend_iterator +{ + /** + * Get the next record in the interator set. + * + * This returns a hash. The has may contain the following keys: + *
    + *
    pagename
    (string) the page name + *
    version
    (int) the version number + *
    pagedata
    (hash) page meta-data (as returned from backend::get_pagedata().) + *
    versiondata
    (hash) page meta-data (as returned from backend::get_versiondata().) + * + * If this is a page iterator, it must contain the 'pagename' entry --- the others + * are optional. + * + * If this is a version iterator, the 'pagename', 'version', and 'versiondata' + * entries are mandatory. ('pagedata' is optional.) + */ + function next() { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Release resources held by this iterator. + */ + function free() { + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/PearDB.php b/lib/WikiDB/backend/PearDB.php new file mode 100644 index 000000000..ceaa385cf --- /dev/null +++ b/lib/WikiDB/backend/PearDB.php @@ -0,0 +1,705 @@ +_dbh = DB::connect($dsn, true); //FIXME: true -> persistent connection + $dbh = &$this->_dbh; + if (DB::isError($dbh)) { + trigger_error("Can't connect to database: " + . $this->_pear_error_message($dbh), + E_USER_ERROR); + } + $this->_dbh = $dbh; + $dbh->setErrorHandling(PEAR_ERROR_CALLBACK, + array($this, '_pear_error_callback')); + $dbh->setFetchMode(DB_FETCHMODE_ASSOC); + + $prefix = isset($dbparams['prefix']) ? $dbparams['prefix'] : ''; + + $this->_table_names + = array('page_tbl' => $prefix . 'page', + 'version_tbl' => $prefix . 'version', + 'link_tbl' => $prefix . 'link', + 'recent_tbl' => $prefix . 'recent', + 'nonempty_tbl' => $prefix . 'nonempty'); + + $this->_lock_count = 0; + } + + /** + * Close database connection. + */ + function close () { + if (!$this->_dbh) + return; + if ($this->_lock_count) { + trigger_error("WARNING: database still locked" + . " (lock_count = $this->_lock_count)\n
    ", + E_USER_WARNING); + } + $this->_dbh->setErrorHandling(PEAR_ERROR_PRINT); // prevent recursive loops. + $this->unlock('force'); + + $this->_dbh->disconnect(); + $this->_dbh = false; + } + + + /* + * Test fast wikipage. + */ + function is_wiki_page($pagename) { + $dbh = &$this->_dbh; + extract($this->_table_names); + return $dbh->getOne(sprintf("SELECT $page_tbl.id" + . " FROM $nonempty_tbl INNER JOIN $page_tbl USING(id)" + . " WHERE pagename='%s'", + $dbh->quoteString($pagename))); + } + + function get_all_pagenames() { + $dbh = &$this->_dbh; + extract($this->_table_names); + return $dbh->getCol("SELECT pagename" + . " FROM $nonempty_tbl INNER JOIN $page_tbl USING(id)"); + } + + /** + * Read page information from database. + */ + function get_pagedata($pagename) { + $dbh = &$this->_dbh; + $page_tbl = $this->_table_names['page_tbl']; + + //trigger_error("GET_PAGEDATA $pagename", E_USER_NOTICE); + + $result = $dbh->getRow(sprintf("SELECT * FROM $page_tbl WHERE pagename='%s'", + $dbh->quoteString($pagename)), + DB_FETCHMODE_ASSOC); + if (!$result) + return false; + return $this->_extract_page_data($result); + } + + function _extract_page_data(&$query_result) { + extract($query_result); + $data = empty($pagedata) ? array() : unserialize($pagedata); + $data['hits'] = $hits; + return $data; + } + + function update_pagedata($pagename, $newdata) { + $dbh = &$this->_dbh; + $page_tbl = $this->_table_names['page_tbl']; + + // Hits is the only thing we can update in a fast manner. + if (count($newdata) == 1 && isset($newdata['hits'])) { + // Note that this will fail silently if the page does not + // have a record in the page table. Since it's just the + // hit count, who cares? 
+ $dbh->query(sprintf("UPDATE $page_tbl SET hits=%d WHERE pagename='%s'", + $newdata['hits'], $dbh->quoteString($pagename))); + return; + } + + $this->lock(); + $data = $this->get_pagedata($pagename); + if (!$data) { + $data = array(); + $this->_get_pageid($pagename, true); // Creates page record + } + + @$hits = (int)$data['hits']; + unset($data['hits']); + + foreach ($newdata as $key => $val) { + if ($key == 'hits') + $hits = (int)$val; + else if (empty($val)) + unset($data[$key]); + else + $data[$key] = $val; + } + + $dbh->query(sprintf("UPDATE $page_tbl" + . " SET hits=%d, pagedata='%s'" + . " WHERE pagename='%s'", + $hits, + $dbh->quoteString(serialize($data)), + $dbh->quoteString($pagename))); + + $this->unlock(); + } + + function _get_pageid($pagename, $create_if_missing = false) { + + $dbh = &$this->_dbh; + $page_tbl = $this->_table_names['page_tbl']; + + $query = sprintf("SELECT id FROM $page_tbl WHERE pagename='%s'", + $dbh->quoteString($pagename)); + + if (!$create_if_missing) + return $dbh->getOne($query); + + $this->lock(); + $id = $dbh->getOne($query); + if (empty($id)) { + $max_id = $dbh->getOne("SELECT MAX(id) FROM $page_tbl"); + $id = $max_id + 1; + $dbh->query(sprintf("INSERT INTO $page_tbl" + . " (id,pagename,hits)" + . " VALUES (%d,'%s',0)", + $id, $dbh->quoteString($pagename))); + } + $this->unlock(); + return $id; + } + + function get_latest_version($pagename) { + $dbh = &$this->_dbh; + extract($this->_table_names); + return + (int)$dbh->getOne(sprintf("SELECT latestversion" + . " FROM $page_tbl" + . " INNER JOIN $recent_tbl USING(id)" + . " WHERE pagename='%s'", + $dbh->quoteString($pagename))); + } + + function get_previous_version($pagename, $version) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + return + (int)$dbh->getOne(sprintf("SELECT version" + . " FROM $version_tbl" + . " INNER JOIN $page_tbl USING(id)" + . " WHERE pagename='%s'" + . " AND version < %d" + . " ORDER BY version DESC" + . 
" LIMIT 1", + $dbh->quoteString($pagename), + $version)); + } + + /** + * Get version data. + * + * @param $version int Which version to get. + * + * @return hash The version data, or false if specified version does not + * exist. + */ + function get_versiondata($pagename, $version, $want_content = false) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + assert(!empty($pagename)); + assert($version > 0); + + //trigger_error("GET_REVISION $pagename $version $want_content", E_USER_NOTICE); + // FIXME: optimization: sometimes don't get page data? + + if ($want_content) { + $fields = "*"; + } + else { + $fields = ("$page_tbl.*," + . "mtime,minor_edit,versiondata," + . "content<>'' AS have_content"); + } + + $result = $dbh->getRow(sprintf("SELECT $fields" + . " FROM $page_tbl" + . " INNER JOIN $version_tbl USING(id)" + . " WHERE pagename='%s' AND version=%d", + $dbh->quoteString($pagename), $version), + DB_FETCHMODE_ASSOC); + + return $this->_extract_version_data($result); + } + + function _extract_version_data(&$query_result) { + if (!$query_result) + return false; + + extract($query_result); + $data = empty($versiondata) ? array() : unserialize($versiondata); + + $data['mtime'] = $mtime; + $data['is_minor_edit'] = !empty($minor_edit); + + if (isset($content)) + $data['%content'] = $content; + elseif ($have_content) + $data['%content'] = true; + else + $data['%content'] = ''; + + // FIXME: this is ugly. + if (isset($pagename)) { + // Query also includes page data. + // We might as well send that back too... + $data['%pagedata'] = $this->_extract_page_data($query_result); + } + + return $data; + } + + + /** + * Create a new revision of a page. 
+ */ + function set_versiondata($pagename, $version, $data) { + $dbh = &$this->_dbh; + $version_tbl = $this->_table_names['version_tbl']; + + $minor_edit = (int) !empty($data['is_minor_edit']); + unset($data['is_minor_edit']); + + $mtime = (int)$data['mtime']; + unset($data['mtime']); + assert(!empty($mtime)); + + @$content = (string) $data['%content']; + unset($data['%content']); + + unset($data['%pagedata']); + + $this->lock(); + $id = $this->_get_pageid($pagename, true); + + // FIXME: optimize: mysql can do this with one REPLACE INTO (I think). + $dbh->query(sprintf("DELETE FROM $version_tbl" + . " WHERE id=%d AND version=%d", + $id, $version)); + + $dbh->query(sprintf("INSERT INTO $version_tbl" + . " (id,version,mtime,minor_edit,content,versiondata)" + . " VALUES(%d,%d,%d,%d,'%s','%s')", + $id, $version, $mtime, $minor_edit, + $dbh->quoteString($content), + $dbh->quoteString(serialize($data)))); + + $this->_update_recent_table($id); + $this->_update_nonempty_table($id); + + $this->unlock(); + } + + /** + * Delete an old revision of a page. + */ + function delete_versiondata($pagename, $version) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + $this->lock(); + if ( ($id = $this->_get_pageid($pagename)) ) { + $dbh->query("DELETE FROM $version_tbl" + . " WHERE id=$id AND version=$version"); + $this->_update_recent_table($id); + // This shouldn't be needed (as long as the latestversion + // never gets deleted.) But, let's be safe. + $this->_update_nonempty_table($id); + } + $this->unlock(); + } + + /** + * Delete page from the database. 
+ */ + function delete_page($pagename) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + $this->lock(); + if ( ($id = $this->_get_pageid($pagename, 'id')) ) { + $dbh->query("DELETE FROM $version_tbl WHERE id=$id"); + $dbh->query("DELETE FROM $recent_tbl WHERE id=$id"); + $dbh->query("DELETE FROM $nonempty_tbl WHERE id=$id"); + $dbh->query("DELETE FROM $link_tbl WHERE linkfrom=$id"); + $nlinks = $dbh->getOne("SELECT COUNT(*) FROM $link_tbl WHERE linkto=$id"); + if ($nlinks) { + // We're still in the link table (dangling link) so we can't delete this + // altogether. + $dbh->query("UPDATE $page_tbl SET hits=0, pagedata='' WHERE id=$id"); + } + else { + $dbh->query("DELETE FROM $page_tbl WHERE id=$id"); + } + $this->_update_recent_table(); + $this->_update_nonempty_table(); + } + $this->unlock(); + } + + + // The only thing we might be interested in updating which we can + // do fast in the flags (minor_edit). I think the default + // update_versiondata will work fine... + //function update_versiondata($pagename, $version, $data) { + //} + + function set_links($pagename, $links) { + // Update link table. + // FIXME: optimize: mysql can do this all in one big INSERT. + + $dbh = &$this->_dbh; + extract($this->_table_names); + + $this->lock(); + $pageid = $this->_get_pageid($pagename, true); + + $dbh->query("DELETE FROM $link_tbl WHERE linkfrom=$pageid"); + + foreach($links as $link) { + if (isset($linkseen[$link])) + continue; + $linkseen[$link] = true; + $linkid = $this->_get_pageid($link, true); + $dbh->query("INSERT INTO $link_tbl (linkfrom, linkto)" + . " VALUES ($pageid, $linkid)"); + } + $this->unlock(); + } + + /** + * Find pages which link to or are linked from a page. 
+ */ + function get_links($pagename, $reversed = true) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + if ($reversed) + list($have,$want) = array('linkee', 'linker'); + else + list($have,$want) = array('linker', 'linkee'); + + $qpagename = $dbh->quoteString($pagename); + + $result = $dbh->query("SELECT $want.*" + . " FROM $link_tbl" + . " INNER JOIN $page_tbl AS linker ON linkfrom=linker.id" + . " INNER JOIN $page_tbl AS linkee ON linkto=linkee.id" + . " WHERE $have.pagename='$qpagename'" + //. " GROUP BY $want.id" + . " ORDER BY $want.pagename", + DB_FETCHMODE_ASSOC); + + return new WikiDB_backend_PearDB_iter($this, $result); + } + + function get_all_pages($include_deleted) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + if ($include_deleted) { + $result = $dbh->query("SELECT * FROM $page_tbl ORDER BY pagename"); + } + else { + $result = $dbh->query("SELECT $page_tbl.*" + . " FROM $nonempty_tbl INNER JOIN $page_tbl USING(id)" + . " ORDER BY pagename"); + } + + return new WikiDB_backend_PearDB_iter($this, $result); + } + + /** + * Title search. + */ + function text_search($search = '', $fullsearch = false) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + $table = "$nonempty_tbl INNER JOIN $page_tbl USING(id)"; + $fields = "$page_tbl.*"; + $callback = '_sql_match_clause'; + + if ($fullsearch) { + $table .= (" INNER JOIN $recent_tbl ON $page_tbl.id=$recent_tbl.id" + . " INNER JOIN $version_tbl" + . " ON $page_tbl.id=$version_tbl.id" + . " AND latestversion=version" ); + $fields .= ",$version_tbl.*"; + $callback = '_fullsearch_sql_match_clause'; + } + + + $search_clause = $search->makeSqlClause(array($this, $callback)); + + $result = $dbh->query("SELECT $fields FROM $table" + . " WHERE $search_clause" + . 
" ORDER BY pagename"); + + return new WikiDB_backend_PearDB_iter($this, $result); + } + + function _sql_match_clause($word) { + $word = $this->_dbh->quoteString($word); + return "LOWER(pagename) LIKE '%$word%'"; + } + + function _fullsearch_sql_match_clause($word) { + $word = $this->_dbh->quoteString($word); + return "LOWER(pagename) LIKE '%$word%' OR content LIKE '%$word%'"; + } + + /** + * Find highest hit counts. + */ + function most_popular($limit) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + $limitclause = $limit ? " LIMIT $limit" : ''; + $result = $dbh->query("SELECT $page_tbl.*" + . " FROM $nonempty_tbl INNER JOIN $page_tbl USING(id)" + . " ORDER BY hits DESC" + . " $limitclause"); + + return new WikiDB_backend_PearDB_iter($this, $result); + } + + /** + * Find recent changes. + */ + function most_recent($params) { + $limit = 0; + $since = 0; + $include_minor_revisions = false; + $exclude_major_revisions = false; + $include_all_revisions = false; + extract($params); + + $dbh = &$this->_dbh; + extract($this->_table_names); + + $pick = array(); + if ($since) + $pick[] = "mtime >= $since"; + + if ($include_all_revisions) { + // Include all revisions of each page. + $table = "$page_tbl INNER JOIN $version_tbl USING(id)"; + + if ($exclude_major_revisions) { + // Include only minor revisions + $pick[] = "minor_edit <> 0"; + } + elseif (!$include_minor_revisions) { + // Include only major revisions + $pick[] = "minor_edit = 0"; + } + } + else { + $table = ( "$page_tbl INNER JOIN $recent_tbl USING(id)" + . " INNER JOIN $version_tbl ON $version_tbl.id=$page_tbl.id"); + + if ($exclude_major_revisions) { + // Include only most recent minor revision + $pick[] = 'version=latestminor'; + } + elseif (!$include_minor_revisions) { + // Include only most recent major revision + $pick[] = 'version=latestmajor'; + } + else { + // Include only the latest revision (whether major or minor). + $pick[] ='version=latestversion'; + } + } + + $limitclause = $limit ? 
" LIMIT $limit" : ''; + $whereclause = $pick ? " WHERE " . join(" AND ", $pick) : ''; + + // FIXME: use SQL_BUFFER_RESULT for mysql? + $result = $dbh->query("SELECT $page_tbl.*,$version_tbl.*" + . " FROM $table" + . $whereclause + . " ORDER BY mtime DESC" + . $limitclause); + + return new WikiDB_backend_PearDB_iter($this, $result); + } + + function _update_recent_table($pageid = false) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + $maxmajor = "MAX(CASE WHEN minor_edit=0 THEN version END)"; + $maxminor = "MAX(CASE WHEN minor_edit<>0 THEN version END)"; + $maxversion = "MAX(version)"; + + $pageid = (int)$pageid; + + $this->lock(); + + $dbh->query("DELETE FROM $recent_tbl" + . ( $pageid ? " WHERE id=$pageid" : "")); + + $dbh->query( "INSERT INTO $recent_tbl" + . " (id, latestversion, latestmajor, latestminor)" + . " SELECT id, $maxversion, $maxmajor, $maxminor" + . " FROM $version_tbl" + . ( $pageid ? " WHERE id=$pageid" : "") + . " GROUP BY id" ); + $this->unlock(); + } + + function _update_nonempty_table($pageid = false) { + $dbh = &$this->_dbh; + extract($this->_table_names); + + $pageid = (int)$pageid; + + $this->lock(); + + $dbh->query("DELETE FROM $nonempty_tbl" + . ( $pageid ? " WHERE id=$pageid" : "")); + + $dbh->query("INSERT INTO $nonempty_tbl (id)" + . " SELECT $recent_tbl.id" + . " FROM $recent_tbl INNER JOIN $version_tbl" + . " ON $recent_tbl.id=$version_tbl.id" + . " AND version=latestversion" + . " WHERE content<>''" + . ( $pageid ? " AND $recent_tbl.id=$pageid" : "")); + + $this->unlock(); + } + + + /** + * Grab a write lock on the tables in the SQL database. + * + * Calls can be nested. The tables won't be unlocked until + * _unlock_database() is called as many times as _lock_database(). + * + * @access protected + */ + function lock($write_lock = true) { + if ($this->_lock_count++ == 0) + $this->_lock_tables($write_lock); + } + + /** + * Actually lock the required tables. 
+ */ + function _lock_tables($write_lock) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Release a write lock on the tables in the SQL database. + * + * @access protected + * + * @param $force boolean Unlock even if not every call to lock() has been matched + * by a call to unlock(). + * + * @see _lock_database + */ + function unlock($force = false) { + if ($this->_lock_count == 0) + return; + if (--$this->_lock_count <= 0 || $force) { + $this->_unlock_tables(); + $this->_lock_count = 0; + } + } + + /** + * Actually unlock the required tables. + */ + function _unlock_tables($write_lock) { + trigger_error("virtual", E_USER_ERROR); + } + + /** + * Callback for PEAR (DB) errors. + * + * @access protected + * + * @param A PEAR_error object. + */ + function _pear_error_callback($error) { + $this->_dbh->setErrorHandling(PEAR_ERROR_PRINT); // prevent recursive loops. + $this->close(); + //trigger_error($this->_pear_error_message($error), E_USER_WARNING); + ExitWiki($this->_pear_error_message($error)); + } + + function _pear_error_message($error) { + $class = get_class($this); + $message = "$class: fatal database error\n" + . "\t" . $error->getMessage() . "\n" + . "\t(" . $error->getDebugInfo() . ")\n"; + + return $message; + } +}; + +class WikiDB_backend_PearDB_iter +extends WikiDB_backend_iterator +{ + function WikiDB_backend_PearDB_iter(&$backend, &$query_result) { + if (DB::isError($query_result)) { + // This shouldn't happen, I thought. 
+ $backend->_pear_error_callback($query_result); + } + + $this->_backend = &$backend; + $this->_result = $query_result; + } + + function next() { + $backend = &$this->_backend; + if (!$this->_result) + return false; + + $record = $this->_result->fetchRow(DB_FETCHMODE_ASSOC); + if (!$record) { + $this->free(); + return false; + } + + $pagedata = $backend->_extract_page_data($record); + $rec = array('pagename' => $record['pagename'], + 'pagedata' => $pagedata); + + if (!empty($record['version'])) { + $rec['versiondata'] = $backend->_extract_version_data($record); + $rec['version'] = $record['version']; + } + + return $rec; + } + + function free () { + if ($this->_result) { + $this->_result->free(); + $this->_result = false; + } + } +} + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/PearDB_mysql.php b/lib/WikiDB/backend/PearDB_mysql.php new file mode 100644 index 000000000..e180abd07 --- /dev/null +++ b/lib/WikiDB/backend/PearDB_mysql.php @@ -0,0 +1,46 @@ +_dbh; + foreach ($this->_table_names as $table) { + $dbh->query("OPTIMIZE TABLE $table"); + } + } + + /** + * Lock tables. + */ + function _lock_tables($write_lock = true) { + $lock_type = $write_lock ? "WRITE" : "READ"; + foreach ($this->_table_names as $table) { + $tables[] = "$table $lock_type"; + } + $this->_dbh->query("LOCK TABLES " . join(",", $tables)); + } + + /** + * Release all locks. 
+ */ + function _unlock_tables() { + $this->_dbh->query("UNLOCK TABLES"); + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/PearDB_pgsql.php b/lib/WikiDB/backend/PearDB_pgsql.php new file mode 100644 index 000000000..e678340ac --- /dev/null +++ b/lib/WikiDB/backend/PearDB_pgsql.php @@ -0,0 +1,76 @@ +pushErrorHandler(array($this,'_pgsql_open_error')); + $this->WikiDB_backend_PearDB($dbparams); + $ErrorManager->popErrorHandler(); + } + + function _pgsql_open_error($error) { + if (preg_match('/^Undefined\s+index:\s+(options|tty|port)/', + $error->errstr)) + return true; // Ignore error + return false; + } + + /** + * Pack tables. + */ + function optimize() { + $dbh = &$this->_dbh; + foreach ($this->_table_names as $table) { + $dbh->query("VACUUM ANALYZE $table"); + } + } + + /** + * Lock all tables we might use. + */ + function _lock_tables($write_lock = true) { + $dbh = &$this->_dbh; + + $dbh->query("BEGIN WORK"); + foreach ($this->_table_names as $table) { + // FIXME: can we use less restrictive locking. + // (postgres supports transactions, after all.) + $dbh->query("LOCK TABLE $table"); + } + } + + /** + * Unlock all tables. 
+ */ + function _unlock_tables() { + $dbh = &$this->_dbh; + $dbh->query("COMMIT WORK"); + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/dba.php b/lib/WikiDB/backend/dba.php new file mode 100644 index 000000000..0a07eeb5b --- /dev/null +++ b/lib/WikiDB/backend/dba.php @@ -0,0 +1,39 @@ +set_timeout($timeout); + if (!$db->open('c')) { + trigger_error("$dbfile: Can't open dba database", E_USER_ERROR); + ExitWikit(); + } + + $this->WikiDB_backend_dbaBase($db); + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/dbaBase.php b/lib/WikiDB/backend/dbaBase.php new file mode 100644 index 000000000..1dc2351bf --- /dev/null +++ b/lib/WikiDB/backend/dbaBase.php @@ -0,0 +1,424 @@ +_db = &$dba; + // FIXME: page and version tables should be in their own files, probably. + // We'll pack them all in one for now (testing). 
+ $this->_pagedb = new DbaPartition($dba, 'p'); + $this->_versiondb = new DbaPartition($dba, 'v'); + $linkdbpart = new DbaPartition($dba, 'l'); + $this->_linkdb = new WikiDB_backend_dbaBase_linktable($linkdbpart); + $this->_dbdb = new DbaPartition($dba, 'd'); + } + + function close() { + $this->_db->close(); + } + + function optimize() { + $this->_db->optimize(); + } + + function sync() { + $this->_db->sync(); + } + + function rebuild() { + $this->_linkdb->rebuild(); + $this->optimize(); + } + + function check() { + return $this->_linkdb->check(); + } + + function get_pagedata($pagename) { + $result = $this->_pagedb->get($pagename); + if (!$result) + return false; + list(,,$packed) = explode(':', $result, 3); + $data = unserialize($packed); + return $data; + } + + + function update_pagedata($pagename, $newdata) { + $result = $this->_pagedb->get($pagename); + if ($result) { + list($latestversion,$flags,$data) = explode(':', $result, 3); + $data = unserialize($data); + } + else { + $latestversion = $flags = 0; + $data = array(); + } + + foreach ($newdata as $key => $val) { + if (empty($val)) + unset($data[$key]); + else + $data[$key] = $val; + } + $this->_pagedb->set($pagename, + (int)$latestversion . ':' + . (int)$flags . ':' + . serialize($data)); + } + + function get_latest_version($pagename) { + return (int) $this->_pagedb->get($pagename); + } + + function get_previous_version($pagename, $version) { + $versdb = &$this->_versiondb; + + while (--$version > 0) { + if ($versdb->exists($version . ":$pagename")) + return $version; + } + return false; + } + + function get_versiondata($pagename, $version, $want_content = false) { + $data = $this->_versiondb->get((int)$version . ":$pagename"); + return $data ? unserialize($data) : false; + } + + /** + * Delete page from the database. 
+ */ + function delete_page($pagename) { + $pagedb = &$this->_pagedb; + $versdb = &$this->_versiondb; + + $version = $this->get_latest_version($pagename); + while ($version > 0) { + $versdb->set($version-- . ":$pagename", false); + } + $pagedb->set($pagename, false); + } + + /** + * Delete an old revision of a page. + */ + function delete_versiondata($pagename, $version) { + $versdb = &$this->_versiondb; + + $latest = $this->get_latest_version($pagename); + + assert($version > 0); + assert($version <= $latest); + + $versdb->set((int)$version . ":$pagename", false); + + if ($version == $latest) { + $previous = $this->get_previous_version($version); + if ($previous> 0) { + $pvdata = $this->get_versiondata($pagename, $previous); + $is_empty = empty($pvdata['%content']); + } + else + $is_empty = true; + $this->_update_latest_version($pagename, $previous, $is_empty); + } + } + + /** + * Create a new revision of a page. + */ + function set_versiondata($pagename, $version, $data) { + $versdb = &$this->_versiondb; + + $versdb->set((int)$version . ":$pagename", serialize($data)); + if ($version > $this->get_latest_version($pagename)) + $this->_update_latest_version($pagename, $version, empty($data['%content'])); + } + + function _update_latest_version($pagename, $latest, $flags) { + $pagedb = &$this->_pagedb; + + $pdata = $pagedb->get($pagename); + if ($pdata) + list(,,$pagedata) = explode(':',$pdata,3); + else + $pagedata = serialize(array()); + + $pagedb->set($pagename, (int)$latest . ':' . (int)$flags . 
":$pagedata"); + } + + function get_all_pages($include_deleted = false) { + $pagedb = &$this->_pagedb; + + $pages = array(); + for ($page = $pagedb->firstkey(); $page!== false; $page = $pagedb->nextkey()) { + if (!$page) { + assert(!empty($page)); + continue; + } + + if (!$include_deleted) { + list($latestversion,$flags,) = explode(':', $pagedb->get($page), 3); + if ($latestversion == 0 || $flags != 0) + continue; // current content is empty + } + $pages[] = $page; + } + usort($pages, 'WikiDB_backend_dbaBase_sortbypagename'); + return new WikiDB_backend_dbaBase_pageiter($this, $pages); + } + + function set_links($pagename, $links) { + $this->_linkdb->set_links($pagename, $links); + } + + + function get_links($pagename, $reversed = true) { + /* + if ($reversed) { + include_once('lib/WikiDB/backend/dumb/BackLinkIter.php'); + $pages = $this->get_all_pages(); + return new WikiDB_backend_dumb_BackLinkIter($this, $pages, $pagename); + } + */ + $links = $this->_linkdb->get_links($pagename, $reversed); + return new WikiDB_backend_dbaBase_pageiter($this, $links); + } +}; + +function WikiDB_backend_dbaBase_sortbypagename ($a, $b) { + $aname = $a['pagename']; + $bname = $b['pagename']; + return strcasecmp($aname, $bname); +} + + +class WikiDB_backend_dbaBase_pageiter +extends WikiDB_backend_iterator +{ + function WikiDB_backend_dbaBase_pageiter(&$backend, &$pages) { + $this->_backend = $backend; + $this->_pages = $pages ? $pages : array(); + } + + function next() { + if ( ! ($next = array_shift($this->_pages)) ) + return false; + return array('pagename' => $next); + } + + function free() { + $this->_pages = array(); + } +}; + +class WikiDB_backend_dbaBase_linktable +{ + function WikiDB_backend_dbaBase_linktable(&$dba) { + $this->_db = &$dba; + } + + //FIXME: try stroring link lists as hashes rather than arrays. + // (backlink deletion would be faster.) + + function get_links($page, $reversed = true) { + return $this->_get_links($reversed ? 
'i' : 'o', $page); + } + + function set_links($page, $newlinks) { + + $oldlinks = $this->_get_links('o', $page); + + if (!is_array($newlinks)) { + assert(empty($newlinks)); + $newlinks = array(); + } + else { + $newlinks = array_unique($newlinks); + } + sort($newlinks); + $this->_set_links('o', $page, $newlinks); + + reset($newlinks); + reset($oldlinks); + $new = current($newlinks); + $old = current($oldlinks); + while ($new !== false || $old !== false) { + if ($old === false || ($new !== false && $new < $old)) { + // $new is a new link (not in $oldlinks). + $this->_add_backlink($new, $page); + $new = next($newlinks); + } + elseif ($new === false || $old < $new) { + // $old is a obsolete link (not in $newlinks). + $this->_delete_backlink($old, $page); + $old = next($oldlinks); + } + else { + // Unchanged link (in both $newlist and $oldlinks). + assert($new == $old); + $new = next($newlinks); + $old = next($oldlinks); + } + } + } + + /** + * Rebuild the back-link index. + * + * This should never be needed, but if the database gets hosed for some reason, + * this should put it back into a consistent state. + * + * We assume the forward links in the our table are correct, and recalculate + * all the backlinks appropriately. + */ + function rebuild () { + $db = &$this->_db; + + // Delete the backlink tables, make a list of page names. + $okeys = array(); + $ikeys = array(); + for ($key = $db->firstkey(); $key; $key = $db->nextkey()) { + if ($key[0] == 'i') + $ikeys[] = $key; + elseif ($key[0] == 'o') + $okeys[] = $key; + else { + trigger_error("Bad key in linktable: '$key'", E_USER_WARNING); + $ikeys[] = $key; + } + } + foreach ($ikeys as $key) { + $db->delete($key); + } + foreach ($okeys as $key) { + $page = substr($key,1); + $links = $this->_get_links('o', $page); + $db->delete($key); + $this->set_links($page, $links); + } + } + + function check() { + $db = &$this->_db; + + // FIXME: check for sortedness and uniqueness in links lists. 
+ + for ($key = $db->firstkey(); $key; $key = $db->nextkey()) { + if (strlen($key) < 1 || ($key[0] != 'i' && $key[0] != 'o')) { + $errs[] = "Bad key '$key' in table"; + continue; + } + $page = substr($key, 1); + if ($key[0] == 'o') { + // Forward links. + foreach($this->_get_links('o', $page) as $link) { + if (!$this->_has_link('i', $link, $page)) + $errs[] = "backlink entry missing for link '$page'->'$link'"; + } + } + else { + assert($key[0] == 'i'); + // Backlinks. + foreach($this->_get_links('i', $page) as $link) { + if (!$this->_has_link('o', $link, $page)) + $errs[] = "link entry missing for backlink '$page'<-'$link'"; + } + } + } + + return isset($errs) ? $errs : false; + } + + + function _add_backlink($page, $linkedfrom) { + $backlinks = $this->_get_links('i', $page); + $backlinks[] = $linkedfrom; + sort($backlinks); + $this->_set_links('i', $page, $backlinks); + } + + function _delete_backlink($page, $linkedfrom) { + $backlinks = $this->_get_links('i', $page); + foreach ($backlinks as $key => $backlink) { + if ($backlink == $linkedfrom) + unset($backlinks[$key]); + } + $this->_set_links('i', $page, $backlinks); + } + + function _has_link($which, $page, $link) { + $links = $this->_get_links($which, $page); + foreach($links as $l) { + if ($l == $link) + return true; + } + return false; + } + + function _get_links($which, $page) { + $data = $this->_db->get($which . $page); + return $data ? unserialize($data) : array(); + } + + function _set_links($which, $page, &$links) { + $key = $which . 
$page; + if ($links) + $this->_db->set($key, serialize($links)); + else + $this->_db->set($key, false); + } +} + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/dumb/AllRevisionsIter.php b/lib/WikiDB/backend/dumb/AllRevisionsIter.php new file mode 100644 index 000000000..dafd7c4e8 --- /dev/null +++ b/lib/WikiDB/backend/dumb/AllRevisionsIter.php @@ -0,0 +1,68 @@ +_backend = &$backend; + $this->_pagename = $pagename; + $this->_lastversion = -1; + } + + /** + * Get next revision in sequence. + * + * @see WikiDB_backend_iterator_next; + */ + function next () { + $backend = &$this->_backend; + $pagename = &$this->_pagename; + $version = &$this->_lastversion; + + $backend->lock(); + if ($this->_lastversion == -1) + $version = $backend->get_latest_version($pagename); + elseif ($this->_lastversion > 0) + $version = $backend->get_previous_version($pagename, $version); + + if ($version) + $vdata = $backend->get_versiondata($pagename, $version); + $backend->unlock(); + + if ($version == 0) + return false; + + $rev = array('versiondata' => $vdata, + 'pagename' => $pagename, + 'version' => $version); + + if (!empty($vdata['%pagedata'])) + $rev['pagedata'] = &$vdata['%pagedata']; + + return $rev; + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/dumb/BackLinkIter.php b/lib/WikiDB/backend/dumb/BackLinkIter.php new file mode 100644 index 000000000..21dfaeeeb --- /dev/null +++ b/lib/WikiDB/backend/dumb/BackLinkIter.php @@ -0,0 +1,39 @@ +_pages = $all_pages; + $this->_backend = &$backend; + $this->_target = $pagename; + } + + function next() { + while ($page = $this->_pages->next()) { + $pagename = $page['pagename']; + $links = $this->_backend->get_links($pagename, 
false);
            while ($link = $links->next()) {
                if ($link['pagename'] == $this->_target) {
                    $links->free();
                    return $page;
                }
            }
        }
        // FIX: previously fell off the end returning null; return an
        // explicit false, the end-of-stream convention used by the other
        // WikiDB_backend_iterator implementations in this patch.
        return false;
    }

    function free() {
        $this->_pages->free();
    }
}

?>
diff --git a/lib/WikiDB/backend/dumb/MostPopularIter.php b/lib/WikiDB/backend/dumb/MostPopularIter.php
new file mode 100644
index 000000000..e5e135170
--- /dev/null
+++ b/lib/WikiDB/backend/dumb/MostPopularIter.php
@@ -0,0 +1,48 @@
_pages = array();
$pages = &$this->_pages;

// NOTE(review): "= &" on a method-call result is deprecated
// reference syntax; left untouched since next() may intend to
// return by reference -- confirm before modernizing.
while ($page = & $all_pages->next()) {
    // Hit counts live in the page data; fetch it if the backend's
    // iterator did not already supply it.
    if (!isset($page['pagedata']))
        $page['pagedata'] = $backend->get_pagedata($page['pagename']);
    $pages[] = $page;
}

// Sort by hit count, most popular first.
usort($pages, 'WikiDB_backend_dumb_MostPopularIter_sortf');

// Truncate to at most $limit entries (a false/zero $limit means no limit).
if ($limit && $limit < count($pages))
    array_splice($pages, $limit);
}

function next() {
    return array_shift($this->_pages);
}

function free() {
    unset($this->_pages);
}
}

// Comparator: descending by hit count.  The '@' suppressions cover
// pages whose pagedata has no 'hits' entry (treated as 0).
function WikiDB_backend_dumb_MostPopularIter_sortf($a,$b) {
    @$ahits = (int)$a['pagedata']['hits'];
    @$bhits = (int)$b['pagedata']['hits'];
    return $bhits - $ahits;
}

?>
diff --git a/lib/WikiDB/backend/dumb/MostRecentIter.php b/lib/WikiDB/backend/dumb/MostRecentIter.php
new file mode 100644
index 000000000..f64ba9257
--- /dev/null
+++ b/lib/WikiDB/backend/dumb/MostRecentIter.php
@@ -0,0 +1,73 @@
_revisions = array();
while ($page = $pages->next()) {
    $revs = $backend->get_all_revisions($page['pagename']);
    while ($revision = &$revs->next()) {
        $vdata = &$revision['versiondata'];
        assert(is_array($vdata));
        if (!empty($vdata['is_minor_edit'])) {
            if (!$include_minor_revisions)
                continue;
        }
        else {
            if ($exclude_major_revisions)
                continue;
        }
        // Revisions arrive newest-first, so once one predates $since
        // the remainder of this page's revisions do too.
        if (!empty($since) && $vdata['mtime'] < $since)
            break;

        $this->_revisions[] = $revision;

        if (!$include_all_revisions)
            break;
    }
    $revs->free();
}

usort($this->_revisions, 'WikiDB_backend_dumb_MostRecentIter_sortf');
if (!empty($limit) && $limit < count($this->_revisions))
array_splice($this->_revisions, $limit); + } + + function next() { + return array_shift($this->_revisions); + } + + function free() { + unset($this->_revisions); + } +} + +function WikiDB_backend_dumb_MostRecentIter_sortf($a, $b) { + $acreated = $a['versiondata']['mtime']; + $bcreated = $b['versiondata']['mtime']; + return $bcreated - $acreated; +} + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/backend/dumb/TextSearchIter.php b/lib/WikiDB/backend/dumb/TextSearchIter.php new file mode 100644 index 000000000..bb34b41cb --- /dev/null +++ b/lib/WikiDB/backend/dumb/TextSearchIter.php @@ -0,0 +1,57 @@ +_backend = &$backend; + $this->_pages = $pages; + $this->_fullsearch = $fullsearch; + $this->_search = $search; + } + + function _get_content(&$page) { + $backend = &$this->_backend; + $pagename = $page['pagename']; + + if (!isset($page['versiondata'])) { + $version = $backend->get_latest_version($pagename); + $page['versiondata'] = $backend->get_versiondata($pagename, $version, true); + } + return $page['versiondata']['%content']; + } + + + function _match(&$page) { + $text = $page['pagename']; + if ($this->_fullsearch) + $text .= "\n" . 
$this->_get_content($page); + + return $this->_search->match($text); + } + + function next() { + $pages = &$this->_pages; + + while ($page = $pages->next()) { + if ($this->_match($page)) + return $page; + } + return false; + } + + function free() { + $this->_pages->free(); + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiDB/dba.php b/lib/WikiDB/dba.php new file mode 100644 index 000000000..e3182e800 --- /dev/null +++ b/lib/WikiDB/dba.php @@ -0,0 +1,39 @@ +WikiDB($backend, $dbparams); + + if (empty($dbparams['directory']) + || preg_match('@^/tmp\b@', $dbparams['directory'])) { + $this->_warnings + = " DBA files are in the /tmp directory. " + . "Please read the INSTALL file and move " + . "the DB file to a permanent location or risk losing " + . "all the pages!"; + } + else + $this->_warnings = false; + } + + function genericWarnings () { + return $this->_warnings; + } +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiPlugin.php b/lib/WikiPlugin.php new file mode 100644 index 000000000..165479d89 --- /dev/null +++ b/lib/WikiPlugin.php @@ -0,0 +1,279 @@ +name; + } + + + function getArgs($argstr, $request, $defaults = false) { + if ($defaults === false) + $defaults = $this->getDefaultArguments(); + + list ($argstr_args, $argstr_defaults) = $this->parseArgStr($argstr); + + foreach ($defaults as $arg => $default_val) { + if (isset($argstr_args[$arg])) + $args[$arg] = $argstr_args[$arg]; + elseif ( ($argval = $request->getArg($arg)) ) + $args[$arg] = $argval; + elseif (isset($argstr_defaults[$arg])) + $args[$arg] = (string) $argstr_defaults[$arg]; + else + $args[$arg] = $default_val; + + $args[$arg] = $this->expandArg($args[$arg], $request); + + 
unset($argstr_args[$arg]); + unset($argstr_defaults[$arg]); + } + + foreach (array_merge($argstr_args, $argstr_defaults) as $arg => $val) { + trigger_error("$arg: argument not declared by plugin", + E_USER_NOTICE); + } + + return $args; + } + + function expandArg($argval, $request) { + return preg_replace('/\[(\w[\w\d]*)\]/e', '$request->getArg("$1")', $argval); + } + + + function parseArgStr($argstr) { + $arg_p = '\w+'; + $op_p = '(?:\|\|)?='; + $word_p = '\S+'; + $qq_p = '"[^"]*"'; + $q_p = "'[^']*'"; + $opt_ws = '\s*'; + $argspec_p = "($arg_p) $opt_ws ($op_p) $opt_ws ($qq_p|$q_p|$word_p)"; + + $args = array(); + $defaults = array(); + + while (preg_match("/^$opt_ws $argspec_p $opt_ws/x", $argstr, $m)) { + @ list(,$arg,$op,$val) = $m; + $argstr = substr($argstr, strlen($m[0])); + + // Remove quotes from string values. + if ($val && ($val[0] == '"' || $val[0] == "'")) + $val = substr($val, 1, strlen($val) - 2); + + if ($op == '=') { + $args[$arg] = $val; + } + else { + assert($op == '||='); + $defaults[$arg] = $val; + } + } + + if ($argstr) { + trigger_error("trailing cruft in plugin args: '$argstr'", E_USER_WARNING); + } + + return array($args, $defaults); + } + + + function getDefaultLinkArguments() { + return array('targetpage' => $this->getName(), + 'linktext' => $this->getName(), + 'description' => false, + 'class' => 'wikiaction'); + } + + function makeLink($argstr, $request) { + $defaults = $this->getDefaultArguments(); + $link_defaults = $this->getDefaultLinkArguments(); + $defaults = array_merge($defaults, $link_defaults); + + $args = $this->getArgs($argstr, $request, $defaults); + $plugin = $this->getName(); + + $query_args = array(); + foreach ($args as $arg => $val) { + if (isset($link_defaults[$arg])) + continue; + if ($val != $defaults[$arg]) + $query_args[$arg] = $val; + } + + $attr = array('href' => WikiURL($args['targetpage'], $query_args), + 'class' => $args['class']); + + if ($args['description']) { + $attr['title'] = $args['description']; + 
$attr['onmouseover'] = sprintf("window.status='%s';return true;", + str_replace("'", "\\'", $args['description'])); + $attr['onmouseout'] = "window.status='';return true;"; + } + return QElement('a', $attr, $args['linktext']); + } + + function getDefaultFormArguments() { + return array('targetpage' => $this->getName(), + 'buttontext' => $this->getName(), + 'class' => 'wikiaction', + 'method' => 'get', + 'textinput' => 's', + 'description' => false, + 'formsize' => 30); + } + + function makeForm($argstr, $request) { + $form_defaults = $this->getDefaultFormArguments(); + $defaults = array_merge($this->getDefaultArguments(), + $form_defaults); + + $args = $this->getArgs($argstr, $request, $defaults); + $plugin = $this->getName(); + $textinput = $args['textinput']; + assert(!empty($textinput) && isset($args['textinput'])); + + $formattr = array('action' => WikiURL($args['targetpage']), + 'method' => $args['method'], + 'class' => $args['class']); + $contents = ''; + foreach ($args as $arg => $val) { + if (isset($form_defaults[$arg])) + continue; + if ($arg != $textinput && $val == $defaults[$arg]) + continue; + + $attr = array('name' => $arg, 'value' => $val); + + if ($arg == $textinput) { + //if ($inputs[$arg] == 'file') + // $attr['type'] = 'file'; + //else + $attr['type'] = 'text'; + $attr['size'] = $args['formsize']; + if ($args['description']) { + $attr['title'] = $args['description']; + $attr['onmouseover'] = sprintf("window.status='%s';return true;", + str_replace("'", "\\'", $args['description'])); + $attr['onmouseout'] = "window.status='';return true;"; + } + } + else { + $attr['type'] = 'hidden'; + } + + $contents .= Element('input', $attr); + + // FIXME: hackage + if ($attr['type'] == 'file') { + $formattr['enctype'] = 'multipart/form-data'; + $formattr['method'] = 'post'; + $contents .= Element('input', + array('name' => 'MAX_FILE_SIZE', + 'value' => MAX_UPLOAD_SIZE, + 'type' => 'hidden')); + } + } + + if (!empty($args['buttontext'])) { + $contents .= 
Element('input', + array('type' => 'submit', + 'class' => 'button', + 'value' => $args['buttontext'])); + } + + //FIXME: can we do without this table? + return Element('form', $formattr, + Element('table', + Element('tr', + Element('td', $contents)))); + } +} + +class WikiPluginLoader { + var $_errors; + + function expandPI($pi, $dbi, $request) { + if (!preg_match('/^\s*<\?(plugin(?:-form|-link)?)\s+(\w+)\s*(.*?)\s*\?>\s*$/s', $pi, $m)) + return $this->_error("Bad PI"); + + list(, $pi_name, $plugin_name, $plugin_args) = $m; + $plugin = $this->getPlugin($plugin_name); + if (!is_object($plugin)) { + return QElement($pi_name == 'plugin-link' ? 'span' : 'p', + array('class' => 'plugin-error'), + $this->getErrorDetail()); + } + switch ($pi_name) { + case 'plugin': + return $plugin->run($dbi, $plugin_args, $request); + case 'plugin-link': + return $plugin->makeLink($plugin_args, $request); + case 'plugin-form': + return $plugin->makeForm($plugin_args, $request); + } + } + + function getPlugin($plugin_name) { + + // Note that there seems to be no way to trap parse errors + // from this include. (At least not via set_error_handler().) + $plugin_source = "lib/plugin/$plugin_name.php"; + + if (!include_once("lib/plugin/$plugin_name.php")) { + if (!empty($GLOBALS['php_errormsg'])) + return $this->_error($GLOBALS['php_errormsg']); + // If the plugin source has already been included, the include_once() + // will fail, so we don't want to crap out just yet. 
+ $include_failed = true; + } + + $plugin_class = "WikiPlugin_$plugin_name"; + if (!class_exists($plugin_class)) { + if ($include_failed) + return $this->_error("Include of '$plugin_source' failed"); + return $this->_error("$plugin_class: no such class"); + } + + + $plugin = new $plugin_class; + if (!is_subclass_of($plugin, "WikiPlugin")) + return $this->_error("$plugin_class: not a subclass of WikiPlugin"); + + return $plugin; + } + + function getErrorDetail() { + return htmlspecialchars($this->_errors); + } + + function _error($message) { + $this->_errors = $message; + return false; + } + + +}; + +// (c-file-style: "gnu") +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/WikiUser.php b/lib/WikiUser.php new file mode 100644 index 000000000..a35e98d15 --- /dev/null +++ b/lib/WikiUser.php @@ -0,0 +1,229 @@ +_request = &$request; + // Restore from cookie. + $this->_restore(); + + if ($this->state == 'authorized' && $auth_mode == 'LOGIN') { + // ...logout + $this->realm++; + $this->state = 'loggedout'; + } + + if ($auth_mode != 'LOGOUT') { + $user = $this->_get_authenticated_userid(); + + if (!$user && $auth_mode != 'ANON_OK') + $warning = $this->_demand_http_authentication(); //NORETURN + } + + if (empty($user)) { + // Authentication failed + if ($this->state == 'authorized') + $this->realm++; + $this->state = 'loggedout'; + $this->userid = $request->get('REMOTE_HOST'); + } + else { + // Successful authentication + $this->state = 'authorized'; + $this->userid = $user; + } + + // Save state to cookie and/or session registry. 
+ $this->_save($request); + + if (isset($warning)) + echo $warning; + } + + function id () { + return $this->userid; + } + + function authenticated_id() { + if ($this->is_authenticated()) + return $this->id(); + else + return $this->_request->get('REMOTE_ADDR'); + } + + function is_authenticated () { + return $this->state == 'authorized'; + } + + function is_admin () { + return $this->is_authenticated() && $this->userid == ADMIN_USER; + } + + function must_be_admin ($action = "") { + if (! $this->is_admin()) + { + if ($action) + $to_what = sprintf(gettext("to perform action '%s'"), $action); + else + $to_what = gettext("to do that"); + ExitWiki(gettext("You must be logged in as an administrator") + . " $to_what"); + } + } + + // This is a bit of a hack: + function setPreferences ($prefs) { + $req = &$this->_request; + $req->setCookieVar('WIKI_PREFS', $prefs, 365); // expire in a year. + } + + function getPreferences () { + $req = &$this->_request; + + $prefs = array('edit_area.width' => 80, + 'edit_area.height' => 22); + + $saved = $req->getCookieVar('WIKI_PREFS'); + + if (is_array($saved)) { + foreach ($saved as $key => $vval) { + if (isset($pref[$key]) && !empty($val)) + $prefs[$key] = $val; + } + } + + // Some sanity checks. (FIXME: should move somewhere else) + if (!($prefs['edit_area.width'] >= 30 && $prefs['edit_area.width'] <= 150)) + $prefs['edit_area.width'] = 80; + if (!($prefs['edit_area.height'] >= 5 && $prefs['edit_area.height'] <= 80)) + $prefs['edit_area.height'] = 22; + return $prefs; + } + + function _get_authenticated_userid () { + if ( ! ($user = $this->_get_http_authenticated_userid()) ) + return false; + + switch ($this->state) { + case 'login': + // Either we just asked for a password, or cookies are not enabled. + // In either case, proceed with successful login. + return $user; + case 'loggedout': + // We're logged out. Ignore http authed user. 
+ return false; + default: + // FIXME: Can't reset auth cache on Mozilla (and probably others), + // so for now, just trust the saved state + return $this->userid; + + // Else, as long as the user hasn't changed, fine. + if ($user && $user != $this->userid) + return false; + return $user; + } + } + + function _get_http_authenticated_userid () { + global $WikiNameRegexp; + + $userid = $this->_request->get('PHP_AUTH_USER'); + $passwd = $this->_request->get('PHP_AUTH_PW'); + + if (!empty($userid) && $userid == ADMIN_USER) { + if (!empty($passwd) && $passwd == ADMIN_PASSWD) + return $userid; + } + elseif (ALLOW_BOGO_LOGIN + && preg_match('/\A' . $WikiNameRegexp . '\z/', $userid)) { + // FIXME: this shouldn't count as authenticated. + return $userid; + } + return false; + } + + function _demand_http_authentication () { + if (!defined('ADMIN_USER') || !defined('ADMIN_PASSWD') + || ADMIN_USER == '' || ADMIN_PASSWD =='') { + return + "

    " + . gettext("You must set the administrator account and password before you can log in.") + . "

    \n"; + } + + // Request password + $this->userid = ''; + $this->state = 'login'; + + $this->_save(); + header('WWW-Authenticate: Basic realm="' . $this->realm . '"'); + header("HTTP/1.0 401 Unauthorized"); + if (ACCESS_LOG) + $LogEntry->status = 401; + echo "

    " . gettext ("You entered an invalid login or password.") . "\n"; + if (ALLOW_BOGO_LOGIN) { + echo "

    "; + echo gettext ("You can log in using any valid WikiWord as a user ID.") . "\n"; + echo gettext ("(Any password will work, except, of course for the admin user.)") . "\n"; + } + + ExitWiki(); + } + + function _copy($object) { + if (!is_object($object)) + return false; + if (strtolower(get_class($object)) != 'wikiuser') + return false; + + $this->userid = $object->userid; + $this->state = $object->state; + $this->realm = $object->realm; + return true; + } + + function _restore() { + $req = &$this->_request; + + if ( $this->_copy($req->getSessionVar('auth_state')) ) + return; + elseif ( $this->_copy($req->getCookieVar('WIKI_AUTH')) ) + return; + else { + // Default state. + $this->userid = ''; + $this->state = 'login'; + $this->realm = 'PhpWiki0000'; + } + } + + function _save() { + $req = &$this->_request; + + $req->setSessionVar('auth_state', $this); + $req->setCookieVar('WIKI_AUTH', $this); + } +} + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: +?> diff --git a/lib/config.php b/lib/config.php index 59cdf750d..30bd55b22 100644 --- a/lib/config.php +++ b/lib/config.php @@ -1,5 +1,5 @@ \n"; - } - - rsort($res); - reset($res); - return($res); - } - - function MostPopularNextMatch($dbi, $res) { - return; - // the return result is a two element array with 'hits' - // and 'pagename' as the keys - - if (list($index1, $index2, $pagename, $hits) = each($res)) { - echo "most popular next match called
    \n"; - echo "got $pagename, $hits back
    \n"; - $nextpage = array( - "hits" => $hits, - "pagename" => $pagename - ); - return $nextpage; - } else { - return 0; - } - } - - function GetAllWikiPagenames($dbi) { - $namelist = array(); - $d = opendir($dbi); - $curr = 0; - while($entry = readdir($d)) { - $namelist[$curr++] = $entry; - } - - return $namelist; - } -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// End: -?> diff --git a/lib/dbalib.php b/lib/dbalib.php deleted file mode 100644 index f0d5e9016..000000000 --- a/lib/dbalib.php +++ /dev/null @@ -1,279 +0,0 @@ - _dbname('pagesdb'), - 'archive' => _dbname('archivedb'), - 'wikilinks' => _dbname('linksdb'), - 'hottopics' => _dbname('hottopicsdb'), - 'hitcount' => _dbname('hitcountdb')); - -if (preg_match('@^/tmp\b@', $DBParams['directory'])) - $DBWarning = "DBA files are in the /tmp directory."; - -define('MAX_DBM_ATTEMPTS', $DBParams['timeout']); - -// open a database and return the handle -// loop until we get a handle; php has its own -// locking mechanism, thank god. -// Suppress ugly error message with @. - -function OpenDataBase($dbname) { - global $WikiDB; // hash of all the DBM file names - - reset($WikiDB); - while (list($key, $file) = each($WikiDB)) { - while (($dbi[$key] = @dba_open($file, "c", "gdbm")) < 1) { - $numattempts++; - if ($numattempts > MAX_DBM_ATTEMPTS) { - ExitWiki("Cannot open database '$key' : '$file', giving up."); - } - sleep(1); - } - } - return $dbi; -} - - -function CloseDataBase($dbi) { - reset($dbi); - while (list($dbmfile, $dbihandle) = each($dbi)) { - dba_close($dbihandle); - } - return; -} - - -// take a serialized hash, return same padded out to -// the next largest number bytes divisible by 500. This -// is to save disk space in the long run, since DBM files -// leak memory. 
-function PadSerializedData($data) { - // calculate the next largest number divisible by 500 - $nextincr = 500 * ceil(strlen($data) / 500); - // pad with spaces - $data = sprintf("%-${nextincr}s", $data); - return $data; -} - -// strip trailing whitespace from the serialized data -// structure. -function UnPadSerializedData($data) { - return chop($data); -} - -// Return hash of page + attributes or default -function RetrievePage($dbi, $pagename, $pagestore) { - if ($data = dba_fetch($pagename, $dbi[$pagestore])) { - // unserialize $data into a hash - $pagehash = unserialize(UnPadSerializedData($data)); - return $pagehash; - } else { - return -1; - } -} - -// Either insert or replace a key/value (a page) -function InsertPage($dbi, $pagename, $pagehash) { - $pagedata = PadSerializedData(serialize($pagehash)); - - if (!dba_insert($pagename, $pagedata, $dbi['wiki'])) { - if (!dba_replace($pagename, $pagedata, $dbi['wiki'])) { - ExitWiki("Error inserting page '$pagename'"); - } - } -} - - -// for archiving pages to a seperate dbm -function SaveCopyToArchive($dbi, $pagename, $pagehash) { - global $ArchivePageStore; - - $pagedata = PadSerializedData(serialize($pagehash)); - - if (!dba_insert($pagename, $pagedata, $dbi[$ArchivePageStore])) { - if (!dba_replace($pagename, $pagedata, $dbi['archive'])) { - ExitWiki("Error storing '$pagename' into archive"); - } - } -} - - -function IsWikiPage($dbi, $pagename) { - return dba_exists($pagename, $dbi['wiki']); -} - - -function IsInArchive($dbi, $pagename) { - return dba_exists($pagename, $dbi['archive']); -} - - -// setup for title-search -function InitTitleSearch($dbi, $search) { - $pos['search'] = $search; - $pos['key'] = dba_firstkey($dbi['wiki']); - - return $pos; -} - -// iterating through database -function TitleSearchNextMatch($dbi, &$pos) { - while ($pos['key']) { - $page = $pos['key']; - $pos['key'] = dba_nextkey($dbi['wiki']); - - if (eregi($pos['search'], $page)) { - return $page; - } - } - return 0; -} - -// setup for 
full-text search -function InitFullSearch($dbi, $search) { - return InitTitleSearch($dbi, $search); -} - -//iterating through database -function FullSearchNextMatch($dbi, &$pos) { - while ($pos['key']) { - $key = $pos['key']; - $pos['key'] = dba_nextkey($dbi['wiki']); - - $pagedata = dba_fetch($key, $dbi['wiki']); - // test the serialized data - if (eregi($pos['search'], $pagedata)) { - $page['pagename'] = $key; - $pagedata = unserialize(UnPadSerializedData($pagedata)); - $page['content'] = $pagedata['content']; - return $page; - } - } - return 0; -} - -//////////////////////// -// new database features - - -function IncreaseHitCount($dbi, $pagename) { - - if (dba_exists($pagename, $dbi['hitcount'])) { - // increase the hit count - // echo "$pagename there, incrementing...
    \n"; - $count = dba_fetch($pagename, $dbi['hitcount']); - $count++; - dba_replace($pagename, $count, $dbi['hitcount']); - } else { - // add it, set the hit count to one - // echo "adding $pagename to hitcount...
    \n"; - $count = 1; - dba_insert($pagename, $count, $dbi['hitcount']); - } -} - -function GetHitCount($dbi, $pagename) { - - if (dba_exists($pagename, $dbi['hitcount'])) { - // increase the hit count - $count = dba_fetch($pagename, $dbi['hitcount']); - return $count; - } else { - return 0; - } -} - - -function InitMostPopular($dbi, $limit) { - // iterate through the whole dbm file for hit counts - // sort the results highest to lowest, and return - // n..$limit results - - $pagename = dba_firstkey($dbi['hitcount']); - $res[$pagename] = dba_fetch($pagename, $dbi['hitcount']); - - while ($pagename = dba_nextkey($dbi['hitcount'])) { - $res[$pagename] = dba_fetch($pagename, $dbi['hitcount']); - //echo "got $pagename with value " . $res[$pagename] . "
    \n"; - } - - arsort($res); - return($res); -} - -function MostPopularNextMatch($dbi, &$res) { - - // the return result is a two element array with 'hits' - // and 'pagename' as the keys - - if (count($res) == 0) - return 0; - - if (list($pagename, $hits) = each($res)) { - //echo "most popular next match called
    \n"; - //echo "got $pagename, $hits back
    \n"; - $nextpage = array( - "hits" => $hits, - "pagename" => $pagename - ); - // $dbm_mostpopular_cntr++; - return $nextpage; - } else { - return 0; - } -} - -function GetAllWikiPagenames($dbi) { - $namelist = array(); - $ctr = 0; - - $namelist[$ctr] = $key = dba_firstkey($dbi); - - while ($key = dba_nextkey($dbi)) { - $ctr++; - $namelist[$ctr] = $key; - } - - return $namelist; -} - -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// End: -?> diff --git a/lib/dbmlib.php b/lib/dbmlib.php deleted file mode 100644 index 1c04905a7..000000000 --- a/lib/dbmlib.php +++ /dev/null @@ -1,509 +0,0 @@ - _dbname('pagesdb'), - 'archive' => _dbname('archivedb'), - 'wikilinks' => _dbname('linksdb'), - 'hottopics' => _dbname('hottopicsdb'), - 'hitcount' => _dbname('hitcountdb')); - -if (preg_match('@^/tmp\b@', $DBParams['directory'])) - $DBWarning = "DBM files are in the /tmp directory."; - -define('MAX_DBM_ATTEMPTS', $DBParams['timeout']); - - -// open a database and return the handle -// loop until we get a handle; php has its own -// locking mechanism, thank god. -// Suppress ugly error message with @. - -function OpenDataBase($dbname) { - global $WikiDB; // hash of all the DBM file names - - reset($WikiDB); - while (list($key, $file) = each($WikiDB)) { - while (($dbi[$key] = @dbmopen($file, "c")) < 1) { - $numattempts++; - if ($numattempts > MAX_DBM_ATTEMPTS) { - ExitWiki("Cannot open database '$key' : '$file', giving up."); - } - sleep(1); - } - } - return $dbi; -} - - -function CloseDataBase($dbi) { - reset($dbi); - while (list($dbmfile, $dbihandle) = each($dbi)) { - dbmclose($dbihandle); - } - return; -} - - -// take a serialized hash, return same padded out to -// the next largest number bytes divisible by 500. This -// is to save disk space in the long run, since DBM files -// leak memory. 
-function PadSerializedData($data) { - // calculate the next largest number divisible by 500 - $nextincr = 500 * ceil(strlen($data) / 500); - // pad with spaces - $data = sprintf("%-${nextincr}s", $data); - return $data; -} - -// strip trailing whitespace from the serialized data -// structure. -function UnPadSerializedData($data) { - return chop($data); -} - - - -// Return hash of page + attributes or default -function RetrievePage($dbi, $pagename, $pagestore) { - if ($data = dbmfetch($dbi[$pagestore], $pagename)) { - // unserialize $data into a hash - $pagehash = unserialize(UnPadSerializedData($data)); - return $pagehash; - } else { - return -1; - } -} - - -// Either insert or replace a key/value (a page) -function InsertPage($dbi, $pagename, $pagehash, $pagestore='wiki') { - - if ($pagestore == 'wiki') { // a bit of a hack - $linklist = ExtractWikiPageLinks($pagehash['content']); - SetWikiPageLinks($dbi, $pagename, $linklist); - } - - $pagedata = PadSerializedData(serialize($pagehash)); - - if (dbminsert($dbi[$pagestore], $pagename, $pagedata)) { - if (dbmreplace($dbi[$pagestore], $pagename, $pagedata)) { - ExitWiki("Error inserting page '$pagename'"); - } - } -} - - -// for archiving pages to a separate dbm -function SaveCopyToArchive($dbi, $pagename, $pagehash) { - global $ArchivePageStore; - - $pagedata = PadSerializedData(serialize($pagehash)); - - if (dbminsert($dbi[$ArchivePageStore], $pagename, $pagedata)) { - if (dbmreplace($dbi['archive'], $pagename, $pagedata)) { - ExitWiki("Error storing '$pagename' into archive"); - } - } -} - - -function IsWikiPage($dbi, $pagename) { - return dbmexists($dbi['wiki'], $pagename); -} - - -function IsInArchive($dbi, $pagename) { - return dbmexists($dbi['archive'], $pagename); -} - - -function RemovePage($dbi, $pagename) { - - dbmdelete($dbi['wiki'], $pagename); // report error if this fails? 
- dbmdelete($dbi['archive'], $pagename); // no error if this fails - dbmdelete($dbi['hitcount'], $pagename); // no error if this fails - - $linkinfo = RetrievePage($dbi, $pagename, 'wikilinks'); - - // remove page from fromlinks of pages it had links to - if (is_array($linkinfo)) { // page exists? - $tolinks = $linkinfo['tolinks']; - reset($tolinks); - while (list($tolink, $dummy) = each($tolinks)) { - $tolinkinfo = RetrievePage($dbi, $tolink, 'wikilinks'); - if (is_array($tolinkinfo)) { // page found? - $oldFromlinks = $tolinkinfo['fromlinks']; - $tolinkinfo['fromlinks'] = array(); // erase fromlinks - reset($oldFromlinks); - while (list($fromlink, $dummy) = each($oldFromlinks)) { - if ($fromlink != $pagename) // not to be erased? - $tolinkinfo['fromlinks'][$fromlink] = 1; // put link back - } // put link info back in DBM file - InsertPage($dbi, $tolink, $tolinkinfo, 'wikilinks'); - } - } - - // remove page itself - dbmdelete($dbi['wikilinks'], $pagename); - } -} - - -// setup for title-search -function InitTitleSearch($dbi, $search) { - $pos['search'] = $search; - $pos['key'] = dbmfirstkey($dbi['wiki']); - - return $pos; -} - - -// iterating through database -function TitleSearchNextMatch($dbi, &$pos) { - while ($pos['key']) { - $page = $pos['key']; - $pos['key'] = dbmnextkey($dbi['wiki'], $pos['key']); - - if (eregi($pos['search'], $page)) { - return $page; - } - } - return 0; -} - - -// setup for full-text search -function InitFullSearch($dbi, $search) { - return InitTitleSearch($dbi, $search); -} - - -//iterating through database -function FullSearchNextMatch($dbi, &$pos) { - while ($pos['key']) { - $key = $pos['key']; - $pos['key'] = dbmnextkey($dbi['wiki'], $pos['key']); - - $pagedata = dbmfetch($dbi['wiki'], $key); - // test the serialized data - if (eregi($pos['search'], $pagedata)) { - $page['pagename'] = $key; - $pagedata = unserialize(UnPadSerializedData($pagedata)); - $page['content'] = $pagedata['content']; - return $page; - } - } - return 0; -} - - 
-//////////////////////// -// new database features - -function IncreaseHitCount($dbi, $pagename) { - - if (dbmexists($dbi['hitcount'], $pagename)) { - // increase the hit count - // echo "$pagename there, incrementing...
    \n"; - $count = dbmfetch($dbi['hitcount'], $pagename); - $count++; - dbmreplace($dbi['hitcount'], $pagename, $count); - } else { - // add it, set the hit count to one - $count = 1; - dbminsert($dbi['hitcount'], $pagename, $count); - } -} - - -function GetHitCount($dbi, $pagename) { - - if (dbmexists($dbi['hitcount'], $pagename)) { - // increase the hit count - $count = dbmfetch($dbi['hitcount'], $pagename); - return $count; - } else { - return 0; - } -} - - -function InitMostPopular($dbi, $limit) { - // iterate through the whole dbm file for hit counts - // sort the results highest to lowest, and return - // n..$limit results - - // Because sorting all the pages may be a lot of work - // we only get the top $limit. A page is only added if it's score is - // higher than the lowest score in the list. If the list is full then - // one of the pages with the lowest scores is removed. - - $pagename = dbmfirstkey($dbi['hitcount']); - $score = dbmfetch($dbi['hitcount'], $pagename); - $res = array($pagename => (int) $score); - $lowest = $score; - - while ($pagename = dbmnextkey($dbi['hitcount'], $pagename)) { - $score = dbmfetch($dbi['hitcount'], $pagename); - if (count($res) < $limit) { // room left in $res? 
- if ($score < $lowest) - $lowest = $score; - $res[$pagename] = (int) $score; // add page to $res - } elseif ($score > $lowest) { - $oldres = $res; // save old result - $res = array(); - $removed = 0; // nothing removed yet - $newlowest = $score; // new lowest score - $res[$pagename] = (int) $score; // add page to $res - reset($oldres); - while(list($pname, $pscore) = each($oldres)) { - if (!$removed and ($pscore = $lowest)) - $removed = 1; // don't copy this entry - else { - $res[$pname] = (int) $pscore; - if ($pscore < $newlowest) - $newlowest = $pscore; - } - } - $lowest = $newlowest; - } - } - - arsort($res); // sort - reset($res); - - return($res); -} - - -function MostPopularNextMatch($dbi, &$res) { - - // the return result is a two element array with 'hits' - // and 'pagename' as the keys - - if (list($pagename, $hits) = each($res)) { - $nextpage = array( - "hits" => $hits, - "pagename" => $pagename - ); - return $nextpage; - } else { - return 0; - } -} - - -function GetAllWikiPagenames($dbi) { - $namelist = array(); - $ctr = 0; - - $namelist[$ctr] = $key = dbmfirstkey($dbi); - - while ($key = dbmnextkey($dbi, $key)) { - $ctr++; - $namelist[$ctr] = $key; - } - - return $namelist; -} - - -//////////////////////////////////////////// -// functionality for the wikilinks DBM file - -// format of the 'wikilinks' DBM file : -// pagename => -// { tolinks => ( pagename => 1}, fromlinks => { pagename => 1 } } - - // takes a page name, returns array of scored incoming and outgoing links -function GetWikiPageLinks($dbi, $pagename) { - - $linkinfo = RetrievePage($dbi, $pagename, 'wikilinks'); - if (is_array($linkinfo)) { // page exists? 
- $tolinks = $linkinfo['tolinks']; // outgoing links - $fromlinks = $linkinfo['fromlinks']; // incoming links - } else { // new page, but pages may already point to it - // create info for page - $tolinks = array(); - $fromlinks = array(); - // look up pages that link to $pagename - $pname = dbmfirstkey($dbi['wikilinks']); - while ($pname) { - $linkinfo = RetrievePage($dbi, $pname, 'wikilinks'); - if ($linkinfo['tolinks'][$pagename]) // $pname links to $pagename? - $fromlinks[$pname] = 1; - $pname = dbmnextkey($dbi['wikilinks'], $pname); - } - } - - // get and sort the outgoing links - $outlinks = array(); - reset($tolinks); // look up scores for tolinks - while(list($tolink, $dummy) = each($tolinks)) { - $toPage = RetrievePage($dbi, $tolink, 'wikilinks'); - if (is_array($toPage)) // link to internal page? - $outlinks[$tolink] = count($toPage['fromlinks']); - } - arsort($outlinks); // sort on score - $links['out'] = array(); - reset($outlinks); // convert to right format - while(list($link, $score) = each($outlinks)) - $links['out'][] = array($link, $score); - - // get and sort the incoming links - $inlinks = array(); - reset($fromlinks); // look up scores for fromlinks - while(list($fromlink, $dummy) = each($fromlinks)) { - $fromPage = RetrievePage($dbi, $fromlink, 'wikilinks'); - $inlinks[$fromlink] = count($fromPage['fromlinks']); - } - arsort($inlinks); // sort on score - $links['in'] = array(); - reset($inlinks); // convert to right format - while(list($link, $score) = each($inlinks)) - $links['in'][] = array($link, $score); - - // sort all the incoming and outgoing links - $allLinks = $outlinks; // copy the outlinks - reset($inlinks); // add the inlinks - while(list($key, $value) = each($inlinks)) - $allLinks[$key] = $value; - reset($allLinks); // lookup hits - while(list($key, $value) = each($allLinks)) - $allLinks[$key] = (int) dbmfetch($dbi['hitcount'], $key); - arsort($allLinks); // sort on hits - $links['popular'] = array(); - reset($allLinks); // 
convert to right format - while(list($link, $hits) = each($allLinks)) - $links['popular'][] = array($link, $hits); - - return $links; -} - - -// takes page name, list of links it contains -// the $linklist is an array where the keys are the page names -function SetWikiPageLinks($dbi, $pagename, $linklist) { - - $cache = array(); - - // Phase 1: fetch the relevant pairs from 'wikilinks' into $cache - // --------------------------------------------------------------- - - // first the info for $pagename - $linkinfo = RetrievePage($dbi, $pagename, 'wikilinks'); - if (is_array($linkinfo)) // page exists? - $cache[$pagename] = $linkinfo; - else { - // create info for page - $cache[$pagename] = array( 'fromlinks' => array(), - 'tolinks' => array() - ); - // look up pages that link to $pagename - $pname = dbmfirstkey($dbi['wikilinks']); - while ($pname) { - $linkinfo = RetrievePage($dbi, $pname, 'wikilinks'); - if ($linkinfo['tolinks'][$pagename]) - $cache[$pagename]['fromlinks'][$pname] = 1; - $pname = dbmnextkey($dbi['wikilinks'], $pname); - } - } - - // then the info for the pages that $pagename used to point to - $oldTolinks = $cache[$pagename]['tolinks']; - reset($oldTolinks); - while (list($link, $dummy) = each($oldTolinks)) { - $linkinfo = RetrievePage($dbi, $link, 'wikilinks'); - if (is_array($linkinfo)) - $cache[$link] = $linkinfo; - } - - // finally the info for the pages that $pagename will point to - reset($linklist); - while (list($link, $dummy) = each($linklist)) { - $linkinfo = RetrievePage($dbi, $link, 'wikilinks'); - if (is_array($linkinfo)) - $cache[$link] = $linkinfo; - } - - // Phase 2: delete the old links - // --------------------------------------------------------------- - - // delete the old tolinks for $pagename - // $cache[$pagename]['tolinks'] = array(); - // (overwritten anyway in Phase 3) - - // remove $pagename from the fromlinks of pages in $oldTolinks - - reset($oldTolinks); - while (list($oldTolink, $dummy) = each($oldTolinks)) { - if 
($cache[$oldTolink]) { // links to existing page? - $oldFromlinks = $cache[$oldTolink]['fromlinks']; - $cache[$oldTolink]['fromlinks'] = array(); // erase fromlinks - reset($oldFromlinks); // comp. new fr.links - while (list($fromlink, $dummy) = each($oldFromlinks)) { - if ($fromlink != $pagename) - $cache[$oldTolink]['fromlinks'][$fromlink] = 1; - } - } - } - - // Phase 3: add the new links - // --------------------------------------------------------------- - - // set the new tolinks for $pagename - $cache[$pagename]['tolinks'] = $linklist; - - // add $pagename to the fromlinks of pages in $linklist - reset($linklist); - while (list($link, $dummy) = each($linklist)) { - if ($cache[$link]) // existing page? - $cache[$link]['fromlinks'][$pagename] = 1; - } - - // Phase 4: write $cache back to 'wikilinks' - // --------------------------------------------------------------- - - reset($cache); - while (list($link,$fromAndTolinks) = each($cache)) - InsertPage($dbi, $link, $fromAndTolinks, 'wikilinks'); - -} - -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// End: -?> diff --git a/lib/diff.php b/lib/diff.php index e6ab21a83..39de7f9af 100644 --- a/lib/diff.php +++ b/lib/diff.php @@ -1,5 +1,5 @@ 'right'), $label); - - if (is_array($hash)) { - extract($hash); - $cols .= QElement('td', - sprintf(gettext ("version %s"), $version)); - $cols .= QElement('td', - sprintf(gettext ("last modified on %s"), - strftime($datetimeformat, $lastmodified))); - $cols .= QElement('td', - sprintf(gettext ("by %s"), $author)); + if ($rev) { + $url = WikiURL($pagename, array('version' => $rev->getVersion())); + $linked_version = QElement('a', array('href' => $url), $rev->getVersion()); + $cols .= Element('td', + gettext("version") . " " . 
$linked_version); + + $cols .= QElement('td', + sprintf(gettext ("last modified on %s"), + strftime($datetimeformat, $rev->get('mtime')))); + $cols .= QElement('td', + sprintf(gettext ("by %s"), $rev->get('author'))); } else { - $cols .= QElement('td', array('colspan' => '3'), - gettext ("None")); + $cols .= QElement('td', array('colspan' => '3'), + gettext ("None")); } return Element('tr', $cols); } -if (isset($pagename)) -{ - if (!isset($ver1)) { - if (isset($ver2)) $ver1 = $ver2 - 1; - else { - $ver1 = GetMaxVersionNumber($dbi, $pagename, $ArchivePageStore); - $ver2 = 0; - } - } - elseif (!isset($ver2)) $ver2 = 0; - - $older = RetrievePage($dbi, $pagename, SelectStore($dbi, $pagename, $ver1, $WikiPageStore, $ArchivePageStore), $ver1); - $newer = RetrievePage($dbi, $pagename, SelectStore($dbi, $pagename, $ver2, $WikiPageStore, $ArchivePageStore), $ver2); - - $html = Element('table', - PageInfoRow(gettext ("Newer page:"), $newer) - . PageInfoRow(gettext ("Older page:"), $older)); - - $html .= "

    \n"; - - if (is_array($newer) && is_array($older)) - { - $diff = new WikiDiff($older['content'], $newer['content']); - if ($diff->isEmpty()) { - $html .= '


    [' . gettext ("Versions are identical") . ']'; - } else { - //$fmt = new WikiDiffFormatter; - $fmt = new WikiUnifiedDiffFormatter; - $html .= $fmt->format($diff, $older['content']); - } +function showDiff ($dbi, $request) { + $pagename = $request->getArg('pagename'); + $version = $request->getArg('version'); + $previous = $request->getArg('previous'); + + $page = $dbi->getPage($pagename); + + if ($version) { + if (!($new = $page->getRevision($version))) + NoSuchRevision($page, $version); + $new_version = sprintf(gettext("version %d"), $version); + } + else { + $new = $page->getCurrentRevision(); + $new_version = gettext('current version'); + } + + if (preg_match('/^\d+$/', $previous)) { + if ( !($old = $page->getRevision($previous)) ) + NoSuchRevision($page, $previous); + $old_version = sprintf(gettext("version %d"), $previous); + $others = array('major', 'minor', 'author'); + } + else { + switch ($previous) { + case 'major': + $old = $new; + while ($old = $page->getRevisionBefore($old)) { + if (! $old->get('is_minor_edit')) + break; + } + $old_version = gettext("previous major revision"); + $others = array('minor', 'author'); + break; + case 'author': + $old = $new; + while ($old = $page->getRevisionBefore($old)) { + if ($old->get('author') != $new->get('author')) + break; + } + $old_version = gettext("revision by previous author"); + $others = array('major', 'minor'); + break; + case 'minor': + default: + $previous='minor'; + $old = $page->getRevisionBefore($new); + $old_version = gettext("previous revision"); + $others = array('major', 'author'); + break; + } } - echo GeneratePage('MESSAGE', $html, - sprintf(gettext ("Diff of %s."), $pagename), 0); + $new_url = WikiURL($pagename, array('version' => $new->getVersion())); + $new_link = QElement('a', array('href' => $new_url), $new_version); + $old_url = WikiURL($pagename, array('version' => $old ? 
$old->getVersion() : 0)); + $old_link = QElement('a', array('href' => $old_url), $old_version); + $page_link = LinkExistingWikiWord($pagename); + + $html = Element('p', + sprintf(htmlspecialchars(gettext("Differences between %s and %s of %s.")), + $new_link, $old_link, $page_link)); + + $otherdiffs=''; + $label = array('major' => gettext("Previous Major Revision"), + 'minor' => gettext("Previous Revision"), + 'author'=> gettext("Previous Author")); + foreach ($others as $other) { + $args = array('action' => 'diff', 'previous' => $other); + if ($version) + $args['version'] = $version; + $otherdiffs .= ' ' . QElement('a', array('href' => WikiURL($pagename, $args), + 'class' => 'wikiaction'), + $label[$other]); + } + $html .= Element('p', + htmlspecialchars(gettext("Other diffs:")) + . $otherdiffs); + + + if ($old and $old->getVersion() == 0) + $old = false; + + $html .= Element('table', + PageInfoRow($pagename, gettext ("Newer page:"), $new) + . PageInfoRow($pagename, gettext ("Older page:"), $old)); + + $html .= "

    \n"; + + if ($new && $old) { + $diff = new WikiDiff($old->getContent(), $new->getContent()); + if ($diff->isEmpty()) { + $html .= '


    [' . gettext ("Versions are identical") . ']'; + } + else { + //$fmt = new WikiDiffFormatter; + $fmt = new WikiUnifiedDiffFormatter; + $html .= $fmt->format($diff, $old->getContent()); + } + } + + include_once('lib/Template.php'); + echo GeneratePage('MESSAGE', $html, + sprintf(gettext ("Diff: %s"), $pagename)); } + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: ?> diff --git a/lib/display.php b/lib/display.php index 2c2dc462e..6956bc768 100644 --- a/lib/display.php +++ b/lib/display.php @@ -1,27 +1,33 @@ getArg('pagename'); + $version = $request->getArg('version'); + + $page = $dbi->getPage($pagename); + if ($version) { + $revision = $page->getRevision($version); + if (!$revision) + NoSuchRevision($page, $version); + } + else { + $revision = $page->getCurrentRevision(); } - echo GeneratePage('BROWSE', $html, $pagename, $pagehash); + $template = new WikiTemplate('BROWSE'); + $template->setPageRevisionTokens($revision); + $template->replace('CONTENT', do_transform($revision->getContent())); + echo $template->getExpansion(); flush(); + $page->increaseHitCount(); +} - IncreaseHitCount($dbi, $pagename); // For emacs users // Local Variables: // mode: php diff --git a/lib/editpage.php b/lib/editpage.php index f0e20e5d2..43fe9be81 100644 --- a/lib/editpage.php +++ b/lib/editpage.php @@ -1,44 +1,62 @@ is_admin()) { - $html = "

    "; - $html .= gettext ("This page has been locked by the administrator and cannot be edited."); - $html .= "\n

    "; - $html .= gettext ("Sorry for the inconvenience."); - $html .= "\n"; - echo GeneratePage('MESSAGE', $html, sprintf (gettext ("Problem while editing %s"), $pagename), 0); - ExitWiki (""); - } - - $textarea = htmlspecialchars(implode("\n", $pagehash["content"])); - } - else { - if (preg_match("/^${WikiNameRegexp}\$/", $pagename)) $newpage = $pagename; - else $newpage = "[$pagename]"; - - $textarea = htmlspecialchars(sprintf(gettext("Describe %s here."), $newpage)); - - unset($pagehash); - $pagehash["version"] = 0; - $pagehash["lastmodified"] = time(); - $pagehash["author"] = ''; - $currentpage = $pagehash; - } - - echo GeneratePage('EDITPAGE', $textarea, $pagename, $pagehash); - -// For emacs users +rcs_id('$Id: editpage.php,v 1.17 2001-09-18 19:16:23 dairiki Exp $'); + +require_once('lib/Template.php'); + +function editPage($dbi, $request) { + // editpage relies on $pagename, $version + $pagename = $request->getArg('pagename'); + $version = $request->getArg('version'); + + $page = $dbi->getPage($pagename); + $current = $page->getCurrentRevision(); + + if ($version === false) { + $selected = $current; + } + else { + $selected = $page->getRevision($version); + if (!$selected) + NoSuchRevision($page, $version); // noreturn + } + + global $user; // FIXME: make this non-global. + if ($page->get('locked') && !$user->is_admin()) { + $html = "

    "; + $html .= gettext ("This page has been locked by the administrator and cannot be edited."); + $html .= "\n

    "; + $html .= gettext ("Sorry for the inconvenience."); + $html .= "\n"; + + echo GeneratePage('MESSAGE', $html, + sprintf(gettext("Problem while editing %s"), $args->pagename), + $selected); + ExitWiki (""); + } + + + $age = time() - $current->get('mtime'); + $minor_edit = ( $age < MINOR_EDIT_TIMEOUT && $current->get('author') == $user->id() ); + + $formvars = array('content' => htmlspecialchars($selected->getPackedContent()), + 'minor_edit' => $minor_edit ? 'checked' : '', + 'version' => $selected->getVersion(), + 'editversion' => $current->getVersion(), + 'summary' => '', + 'convert' => '', + 'pagename' => htmlspecialchars($pagename)); + + $template = new WikiTemplate('EDITPAGE'); + $template->setPageRevisionTokens($selected); + $template->replace('FORMVARS', $formvars); + echo $template->getExpansion(); +} + // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/fullsearch.php b/lib/fullsearch.php index efcbfbe8a..7d16e4d81 100644 --- a/lib/fullsearch.php +++ b/lib/fullsearch.php @@ -1,48 +1,51 @@ " - . sprintf(gettext ("Searching for \"%s\" ....."), - htmlspecialchars($searchterm)) - . "

    \n
    \n"; - - // search matching pages - $query = InitFullSearch($dbi, $searchterm); - - // quote regexp chars (space are treated as "or" operator) - $qterm = preg_replace("/\s+/", "|", preg_quote($searchterm)); - - $found = 0; - $count = 0; - while ($pagehash = FullSearchNextMatch($dbi, $query)) { - $html .= "
    " . LinkExistingWikiWord($pagehash["pagename"]) . "\n"; - $count++; - - // print out all matching lines, highlighting the match - for ($j = 0; $j < (count($pagehash["content"])); $j++) { - if ($hits = preg_match_all("/$qterm/i", $pagehash["content"][$j], $dummy)) { - $matched = preg_replace("/$qterm/i", - "${FieldSeparator}OT\\0${FieldSeparator}CT", - $pagehash["content"][$j]); - $matched = htmlspecialchars($matched); - $matched = str_replace("${FieldSeparator}OT", '', $matched); - $matched = str_replace("${FieldSeparator}CT", '', $matched); - $html .= "
    $matched
    \n"; +// Search the text of pages for a match. +rcs_id('$Id: fullsearch.php,v 1.7 2001-09-18 19:16:23 dairiki Exp $'); +require_once('lib/Template.php'); +require_once('lib/TextSearchQuery.php'); + +$query = new TextSearchQuery($args->get('searchterm')); + +$html = ("

    " + . sprintf(gettext ("Searching for \"%s\" ....."), + htmlspecialchars($args->get('searchterm'))) + . "

    \n
    \n" ); + +// search matching pages +$iter = $dbi->fullsearch($query); + +// quote regexp chars (space are treated as "or" operator) +$hilight_re = $query->getHighlightRegexp(); + +$found = 0; +$count = 0; +while ($page = $iter->next()) { + $html .= "
    " . LinkExistingWikiWord($page->getName()) . "\n"; + $count++; + if (empty($hilight_re)) + continue; // nothing to highlight + + // print out all matching lines, highlighting the match + $current = $page->getCurrentRevision(); + $matches = preg_grep("/$hilight_re/i", $current->getContent()); + foreach ($matches as $line) { + if ($hits = preg_match_all("/$hilight_re/i", $line, $dummy)) { + $line = preg_replace("/$hilight_re/i", + "${FieldSeparator}OT\\0${FieldSeparator}CT", + $line); + $line = htmlspecialchars($line); + $line = str_replace("${FieldSeparator}OT", '', $line); + $line = str_replace("${FieldSeparator}CT", '', $line); + $html .= "
    $line
    \n"; $found += $hits; - } - } - } - - $html .= "
    \n
    " - . sprintf (gettext ("%d matches found in %d pages."), - $found, $count) - . "\n"; - - echo GeneratePage('MESSAGE', $html, gettext ("Full Text Search Results"), 0); + } + } +} + +$html .= ( "
    \n
    " + . sprintf (gettext ("%d matches found in %d pages."), + $found, $count) + . "\n"); + +echo GeneratePage('MESSAGE', $html, sprintf(gettext("Full Text Search: %s"), $searchterm)); ?> diff --git a/lib/interwiki.php b/lib/interwiki.php index 29fe0a51e..7ca8e2175 100644 --- a/lib/interwiki.php +++ b/lib/interwiki.php @@ -1,4 +1,4 @@ - 'interwiki'), - htmlspecialchars("$wiki:") . - QElement('span', array('class' => 'wikiword'), $page)); + $class = 'named-interwiki'; + } + else { + $linktext = ( htmlspecialchars("$wiki:") + . QElement('span', array('class' => 'wikipage'), $page) ); + $class = 'interwiki'; + } return Element('a', array('href' => $url, - 'class' => 'interwikilink'), + 'class' => $class), $linktext); } diff --git a/lib/loadsave.php b/lib/loadsave.php index 47e0e39cd..18a8726ff 100644 --- a/lib/loadsave.php +++ b/lib/loadsave.php @@ -1,17 +1,19 @@ .*', '', - GeneratePage('MESSAGE', $html, $title, 0)); + GeneratePage('MESSAGE', $html, $title, 0)); } + function EndLoadDump() { // FIXME: This is a hack + echo Element('p', QElement('b', gettext("Complete."))); echo Element('p', "Return to " . LinkExistingWikiWord($GLOBALS['pagename'])); echo "\n"; @@ -24,25 +26,30 @@ function EndLoadDump() // //////////////////////////////////////////////////////////////// -function MailifyPage ($pagehash, $oldpagehash = false) +function MailifyPage ($page, $nversions = 1) { - global $SERVER_ADMIN, $ArchivePageStore; - + global $SERVER_ADMIN; + + $current = $page->getCurrentRevision(); $from = isset($SERVER_ADMIN) ? $SERVER_ADMIN : 'foo@bar'; $head = "From $from " . ctime(time()) . "\r\n"; - $head .= "Subject: " . rawurlencode($pagehash['pagename']) . "\r\n"; + $head .= "Subject: " . rawurlencode($page->getName()) . "\r\n"; $head .= "From: $from (PhpWiki)\r\n"; - $head .= "Date: " . rfc1123date($pagehash['lastmodified']) . "\r\n"; + $head .= "Date: " . rfc1123date($current->get('mtime')) . 
"\r\n"; $head .= sprintf("Mime-Version: 1.0 (Produced by PhpWiki %s)\r\n", PHPWIKI_VERSION); - if (is_array($oldpagehash)) - { - return $head . MimeMultipart(array(MimeifyPage($oldpagehash), - MimeifyPage($pagehash))); + $iter = $page->getAllRevisions(); + $parts = array(); + while ($revision = $iter->next()) { + $parts[] = MimeifyPageRevision($revision); + if ($nversions > 0 && count($parts) >= $nversions) + break; } - - return $head . MimeifyPage($pagehash); + if (count($parts) > 1) + return $head . MimeMultipart($parts); + assert($parts); + return $head . $parts[0]; } /** @@ -52,92 +59,89 @@ function MailifyPage ($pagehash, $oldpagehash = false) * is included in the zip file; otherwise all archived versions are * included as well. */ -function MakeWikiZip ($dbi, $include_archive = false) +function MakeWikiZip ($dbi, $request) { - global $WikiPageStore, $ArchivePageStore; - - $pages = GetAllWikiPageNames($dbi); - $zipname = "wiki.zip"; - - if ($include_archive) { - $zipname = "wikidb.zip"; - } - - $zip = new ZipWriter("Created by PhpWiki", $zipname); - - for (reset($pages); $pagename = current($pages); next($pages)) - { - set_time_limit(30); // Reset watchdog. - $pagehash = RetrievePage($dbi, $pagename, $WikiPageStore, 0); - - if (! is_array($pagehash)) - continue; - - if ($include_archive) - $oldpagehash = RetrievePage($dbi, $pagename, $ArchivePageStore, 0); - else - $oldpagehash = false; - - $attrib = array('mtime' => $pagehash['lastmodified'], - 'is_ascii' => 1); - if (($pagehash['flags'] & FLAG_PAGE_LOCKED) != 0) - $attrib['write_protected'] = 1; - - $content = MailifyPage($pagehash, $oldpagehash); + if ($request->getArg('include') == 'all') { + $zipname = "wikidb.zip"; + $include_archive = true; + } + else { + $zipname = "wiki.zip"; + $include_archive = false; + } + + + + $zip = new ZipWriter("Created by PhpWiki", $zipname); + + $pages = $dbi->getAllPages(); + while ($page = $pages->next()) { + set_time_limit(30); // Reset watchdog. 
+ + $current = $page->getCurrentRevision(); + if ($current->getVersion() == 0) + continue; + + + $attrib = array('mtime' => $current->get('mtime'), + 'is_ascii' => 1); + if ($page->get('locked')) + $attrib['write_protected'] = 1; + + if ($include_archive) + $content = MailifyPage($page, 0); + else + $content = MailifyPage($page); - $zip->addRegularFile( rawurlencode($pagehash['pagename']), - $content, $attrib); - } - $zip->finish(); + $zip->addRegularFile( rawurlencode($page->getName()), + $content, $attrib); + } + $zip->finish(); } -function DumpToDir ($dbi, $directory) +function DumpToDir ($dbi, $request) { - global $WikiPageStore; - - if (empty($directory)) - ExitWiki(gettext("You must specify a directory to dump to")); + $directory = $request->getArg('directory'); + if (empty($directory)) + ExitWiki(gettext("You must specify a directory to dump to")); - // see if we can access the directory the user wants us to use - if (! file_exists($directory)) { - if (! mkdir($directory, 0755)) - ExitWiki("Cannot create directory '$directory'
    \n"); - else - $html = "Created directory '$directory' for the page dump...
    \n"; - } else { - $html = "Using directory '$directory'
    \n"; - } - - StartLoadDump("Dumping Pages", $html); + // see if we can access the directory the user wants us to use + if (! file_exists($directory)) { + if (! mkdir($directory, 0755)) + ExitWiki("Cannot create directory '$directory'
    \n"); + else + $html = "Created directory '$directory' for the page dump...
    \n"; + } else { + $html = "Using directory '$directory'
    \n"; + } + + StartLoadDump("Dumping Pages", $html); - $pages = GetAllWikiPagenames($dbi); - - while (list ($i, $pagename) = each($pages)) - { - $enc_name = htmlspecialchars($pagename); - $filename = rawurlencode($pagename); - - echo "
    $enc_name ... "; - if($pagename != $filename) - echo "saved as $filename ... "; - - $page = RetrievePage($dbi, $pagename, $WikiPageStore, 0); - - //$data = serialize($page); - $data = MailifyPage($page); + $pages = $dbi->getAllPages(); + + while ($page = $pages->next()) { + + $enc_name = htmlspecialchars($page->getName()); + $filename = rawurlencode($page->getName()); + + echo "
    $enc_name ... "; + if($pagename != $filename) + echo "saved as $filename ... "; + + $data = MailifyPage($page); - if ( !($fd = fopen("$directory/$filename", "w")) ) - ExitWiki("couldn't open file '$directory/$filename' for writing\n"); + if ( !($fd = fopen("$directory/$filename", "w")) ) + ExitWiki("couldn't open file '$directory/$filename' for writing\n"); - $num = fwrite($fd, $data, strlen($data)); - echo "$num bytes written\n"; - flush(); + $num = fwrite($fd, $data, strlen($data)); + echo "$num bytes written\n"; + flush(); - assert($num == strlen($data)); - fclose($fd); - } + assert($num == strlen($data)); + fclose($fd); + } - EndLoadDump(); + EndLoadDump(); } //////////////////////////////////////////////////////////////// @@ -146,84 +150,117 @@ function DumpToDir ($dbi, $directory) // //////////////////////////////////////////////////////////////// -function SavePage ($dbi, $page, $defaults, $source, $filename) +function SavePage ($dbi, $pageinfo, $source, $filename) { - global $WikiPageStore; - - // Fill in defaults for missing values? - // Should we do more sanity checks here? - while (list($key, $val) = each($defaults)) - if (empty($page[$key])) - $page[$key] = $val; - - $pagename = $page['pagename']; - - if (empty($pagename)) - { - echo Element('dd'). 
Element('dt', QElement('b', "Empty pagename!")); - return; - } - - - $mesg = array(); - $version = $page['version']; - $isnew = true; - - if ($version) - $mesg[] = sprintf(gettext("version %s"), $version); - if ($source) - $mesg[] = sprintf(gettext("from %s"), $source); - - if (is_array($current = RetrievePage($dbi, $pagename, $WikiPageStore, 0))) - { - $isnew = false; - - if (arrays_equal($current['content'], $page['content']) - && $current['author'] == $page['author'] - && $current['flags'] == $page['flags']) - { - $mesg[] = sprintf(gettext("is identical to current version %d"), - $current['version']); - - if ( $version <= $current['version'] ) - { - $mesg[] = gettext("- skipped"); - $page = false; - } - } - else - { - SavePageToArchive($pagename, $current); - - if ( $version <= $current['version'] ) - $page['version'] = $current['version'] + 1; - } - } - else if ($page['version'] < 1) - $page['version'] = 1; + $pagedata = $pageinfo['pagedata']; // Page level meta-data. + $versiondata = $pageinfo['versiondata']; // Revision level meta-data. + + if (empty($pageinfo['pagename'])) { + echo Element('dd'). 
Element('dt', QElement('b', "Empty pagename!")); + return; + } + + if (empty($versiondata['author_id'])) + $versiondata['author_id'] = $versiondata['author']; + + $pagename = $pageinfo['pagename']; + $content = $pageinfo['content']; + + $page = $dbi->getPage($pagename); + + foreach ($pagedata as $key => $value) { + if (!empty($value)) + $page->set($key, $value); + } + + $mesg = array(); + $skip = false; + if ($source) + $mesg[] = sprintf(gettext("from %s"), $source); + + $current = $page->getCurrentRevision(); + if ($current->getVersion() == 0) { + $mesg[] = gettext("new page"); + $isnew = true; + } + else { + if ($current->getPackedContent() == $content + && $current->get('author') == $versiondata['author']) { + $mesg[] = sprintf(gettext("is identical to current version %d"), + $current->getVersion()); + $mesg[] = gettext("- skipped"); + $skip = true; + } + $isnew = false; + } + + if (! $skip) { + $new = $page->createRevision(WIKIDB_FORCE_CREATE, $content, + $versiondata, + ExtractWikiPageLinks($content)); + + $mesg[] = gettext("- saved"); + $mesg[] = sprintf(gettext("- saved as version %d"), $new->getVersion()); + } - - if ($page) - { - ReplaceCurrentPage($pagename, $page); - UpdateRecentChanges($dbi, $pagename, $isnew); - - $mesg[] = gettext("- saved"); - if ($version != $page['version']) - $mesg[] = sprintf(gettext("as version %d"), $page['version']); - } - - print( Element('dt', LinkExistingWikiWord($pagename)) - . QElement('dd', join(" ", $mesg)) - . "\n" ); - flush(); + print( Element('dt', LinkExistingWikiWord($pagename)) + . QElement('dd', join(" ", $mesg)) + . 
"\n" ); + flush(); } -function ParseSerializedPage($text) +function ParseSerializedPage($text, $default_pagename) { - if (!preg_match('/^a:\d+:{[si]:\d+/', $text)) - return false; - return unserialize($text); + if (!preg_match('/^a:\d+:{[si]:\d+/', $text)) + return false; + + $pagehash = unserialize($text); + + // Split up pagehash into four parts: + // pagename + // content + // page-level meta-data + // revision-level meta-data + + if (!defined('FLAG_PAGE_LOCKED')) + define('FLAG_PAGE_LOCKED', 1); + $pageinfo = array('pagedata' => array(), + 'versiondata' => array()); + + $pagedata = &$pageinfo['pagedata']; + $versiondata = &$pageinfo['versiondata']; + + // Fill in defaults. + if (empty($pagehash['pagename'])) + $pagehash['pagename'] = $default_pagename; + if (empty($pagehash['author'])) + $pagehash['author'] = $GLOBALS['user']->id(); + + + foreach ($pagehash as $key => $value) { + switch($key) { + case 'pagename': + case 'version': + $pageinfo[$key] = $value; + break; + case 'content': + $pageinfo[$key] = join("\n", $value); + case 'flags': + if (($value & FLAG_PAGE_LOCKED) != 0) + $pagedata['locked'] = 'yes'; + break; + case 'created': + $pagedata[$key] = $value; + break; + case 'lastmodified': + $versiondata['mtime'] = $value; + break; + case 'author': + $versiondata[$key] = $value; + break; + } + } + return $pageinfo; } function SortByPageVersion ($a, $b) { @@ -232,45 +269,45 @@ function SortByPageVersion ($a, $b) { function LoadFile ($dbi, $filename, $text = false, $mtime = false) { - if (!is_string($text)) - { - // Read the file. - $stat = stat($filename); - $mtime = $stat[9]; - $text = implode("", file($filename)); - } + if (!is_string($text)) { + // Read the file. + $stat = stat($filename); + $mtime = $stat[9]; + $text = implode("", file($filename)); + } - set_time_limit(30); // Reset watchdog. + set_time_limit(30); // Reset watchdog. - // FIXME: basename("filewithnoslashes") seems to return garbage sometimes. - $basename = basename("/dummy/" . 
$filename); + // FIXME: basename("filewithnoslashes") seems to return garbage sometimes. + $basename = basename("/dummy/" . $filename); - if (!$mtime) - $mtime = time(); // Last resort. - - $defaults = array('author' => $GLOBALS['user']->id(), - 'pagename' => rawurldecode($basename), - 'flags' => 0, - 'version' => 0, - 'created' => $mtime, - 'lastmodified' => $mtime); - - if ( ($parts = ParseMimeifiedPages($text)) ) - { - usort($parts, 'SortByPageVersion'); - for (reset($parts); $page = current($parts); next($parts)) - SavePage($dbi, $page, $defaults, "MIME file $filename", $basename); - } - else if ( ($page = ParseSerializedPage($text)) ) - { - SavePage($dbi, $page, $defaults, "Serialized file $filename", $basename); - } - else - { - // Assume plain text file. - $page['content'] = preg_split('/[ \t\r]*\n/', chop($text)); - SavePage($dbi, $page, $defaults, "plain file $filename", $basename); - } + if (!$mtime) + $mtime = time(); // Last resort. + + $defaults = array('author' => $GLOBALS['user']->id(), + 'pagename' => rawurldecode($basename)); + + $default_pagename = rawurldecode($basename); + + if ( ($parts = ParseMimeifiedPages($text)) ) { + usort($parts, 'SortByPageVersion'); + foreach ($parts as $pageinfo) + SavePage($dbi, $pageinfo, "MIME file $filename", $basename); + } + else if ( ($pageinfo = ParseSerializedPage($text, $default_pagename)) ) { + SavePage($dbi, $pageinfo, "Serialized file $filename", $basename); + } + else { + // Assume plain text file. 
+ $pageinfo = array('pagename' => $default_pagename, + 'pagedata' => array(), + 'versiondata' + => array('author' => $GLOBALS['user']->id()), + 'content' + => preg_replace('/[ \t\r]*\n/', "\n", chop($text)) + ); + SavePage($dbi, $pageinfo, "plain file $filename", $basename); + } } function LoadZip ($dbi, $zipfile, $files = false, $exclude = false) @@ -283,8 +320,8 @@ function LoadZip ($dbi, $zipfile, $files = false, $exclude = false) if ( ($files && !in_array($fn, $files)) || ($exclude && in_array($fn, $exclude)) ) { - print Element('dt', LinkExistingWikiWord($fn)) . QElement('dd', 'Skipping'); - continue; + print Element('dt', LinkExistingWikiWord($fn)) . QElement('dd', 'Skipping'); + continue; } LoadFile($dbi, $fn, $data, $attrib['mtime']); @@ -353,67 +390,65 @@ function LoadAny ($dbi, $file_or_dir, $files = false, $exclude = false) } } -function LoadFileOrDir ($dbi, $source) +function LoadFileOrDir ($dbi, $request) { + $source = $request->getArg('source'); StartLoadDump("Loading '$source'"); echo "
    \n"; - LoadAny($dbi, $source, false, array(gettext('RecentChanges'))); + LoadAny($dbi, $source/*, false, array(gettext('RecentChanges'))*/); echo "
    \n"; EndLoadDump(); } function SetupWiki ($dbi) { - global $GenericPages, $LANG, $user; + global $GenericPages, $LANG, $user; - //FIXME: This is a hack - $user->userid = 'The PhpWiki programming team'; + //FIXME: This is a hack + $user->userid = 'The PhpWiki programming team'; - StartLoadDump('Loading up virgin wiki'); - echo "
    \n"; + StartLoadDump('Loading up virgin wiki'); + echo "
    \n"; - $ignore = array(gettext('RecentChanges')); + LoadAny($dbi, FindLocalizedFile(WIKI_PGSRC)/*, false, $ignore*/); + if ($LANG != "C") + LoadAny($dbi, FindFile(DEFAULT_WIKI_PGSRC), $GenericPages/*, $ignore*/); - LoadAny($dbi, FindLocalizedFile(WIKI_PGSRC), false, $ignore); - if ($LANG != "C") - LoadAny($dbi, FindFile(DEFAULT_WIKI_PGSRC), $GenericPages, $ignore); - - echo "
    \n"; - EndLoadDump(); + echo "
    \n"; + EndLoadDump(); } -function LoadPostFile ($dbi, $postname) +function LoadPostFile ($dbi, $request) { - global $HTTP_POST_FILES; - - extract($HTTP_POST_FILES[$postname]); - fix_magic_quotes_gpc($tmp_name); - fix_magic_quotes_gpc($name); - - if (!is_uploaded_file($tmp_name)) - ExitWiki('Bad file post'); // Possible malicious attack. + $upload = $request->getUploadedFile('file'); + + if (!$upload) + ExitWiki('No uploade file to upload?'); + + // Dump http headers. + $fd = fopen($tmp_name, "rb"); + while ( ($header = fgets($fd, 4096)) ) + if (trim($header) == '') + break; + + StartLoadDump("Uploading " . $upload->getName()); + echo "
    \n"; - // Dump http headers. - $fd = fopen($tmp_name, "rb"); - while ( ($header = fgets($fd, 4096)) ) - if (trim($header) == '') - break; + if (IsZipFile($fd)) + LoadZip($dbi, $upload->open(), false, array(gettext('RecentChanges'))); + else + Loadfile($dbi, $upload->getName(), $upload->getContents()); - StartLoadDump("Uploading $name"); - echo "
    \n"; - - if (IsZipFile($fd)) - LoadZip($dbi, $fd, false, array(gettext('RecentChanges'))); - else - Loadfile($dbi, $name, fread($fd, MAX_UPLOAD_SIZE)); - - echo "
    \n"; - EndLoadDump(); + echo "
    \n"; + EndLoadDump(); } // For emacs users // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/main.php b/lib/main.php index 69adb8502..1bb73a0ec 100644 --- a/lib/main.php +++ b/lib/main.php @@ -1,213 +1,232 @@ write(ACCESS_LOG); } - register_shutdown_function('_write_log'); -} +//include "lib/logger.php"; +require_once('lib/Request.php'); +require_once("lib/WikiUser.php"); +require_once('lib/WikiDB.php'); -if (USE_PATH_INFO && !isset($PATH_INFO) - && (!isset($REDIRECT_URL) || !preg_match(',/$,', $REDIRECT_URL))) -{ - $LogEntry->status = 302; // "302 Found" - header("Location: " . SERVER_URL . preg_replace('/(\?|$)/', '/\1', $REQUEST_URI, 1)); - exit; -} - -function DeducePagename () -{ - global $pagename, $PATH_INFO, $QUERY_STRING; - - if (isset($pagename)) - return fix_magic_quotes_gpc($pagename); - if (USE_PATH_INFO && isset($PATH_INFO)) - { - fix_magic_quotes_gpc($PATH_INFO); - if (ereg('^' . PATH_INFO_PREFIX . '(..*)$', $PATH_INFO, $m)) - return $m[1]; - } +function deduce_pagename ($request) { + if ($request->getArg('pagename')) + return $request->getArg('pagename'); - if (isset($QUERY_STRING) && preg_match('/^[^&=]+$/', $QUERY_STRING)) - return urldecode(fix_magic_quotes_gpc($QUERY_STRING)); + if (USE_PATH_INFO) { + $pathinfo = $request->get('PATH_INFO'); + if (ereg('^' . PATH_INFO_PREFIX . '(..*)$', $pathinfo, $m)) + return $m[1]; + } - return gettext("HomePage"); + $query_string = $request->get('QUERY_STRING'); + if (preg_match('/^[^&=]+$/', $query_string)) + return urldecode($query_string); + + return gettext("HomePage"); } -$pagename = DeducePagename(); - -if (!empty($action)) -{ - $action = trim(fix_magic_quotes_gpc($action)); -} -else if (isset($diff)) -{ - // Fix for compatibility with very old diff links in RecentChanges. - // (The [phpwiki:?diff=PageName] style links are fixed elsewhere.) 
- $action = 'diff'; - $pagename = fix_magic_quotes_gpc($diff); - unset($diff); -} -else -{ - $action = 'browse'; +function is_safe_action ($action) { + if (! ZIPDUMP_AUTH and ($action == 'zip' || $action == 'xmldump')) + return true; + return in_array ( $action, array('browse', + 'info', 'diff', 'search', + 'edit', 'save', + 'login', 'logout', + 'setprefs') ); } -function IsSafeAction ($action) -{ - if (! ZIPDUMP_AUTH and $action == 'zip') - return true; - return in_array ( $action, array('browse', - 'info', 'diff', 'search', - 'edit', 'save', - 'login', 'logout', - 'setprefs') ); +function get_auth_mode ($action) { + switch ($action) { + case 'logout': + return 'LOGOUT'; + case 'login': + return 'LOGIN'; + default: + if (is_safe_action($action)) + return 'ANON_OK'; + else + return 'REQUIRE_AUTH'; + } } -function get_auth_mode ($action) -{ - switch ($action) { - case 'logout': - return 'LOGOUT'; - case 'login': - return 'LOGIN'; - default: - if (IsSafeAction($action)) - return 'ANON_OK'; - else - return 'REQUIRE_AUTH'; - } -} +function main ($request) { + + if (USE_PATH_INFO && ! $request->get('PATH_INFO') + && ! preg_match(',/$,', $request->get('REDIRECT_URL'))) { + $request->redirect(SERVER_URL + . preg_replace('/(\?|$)/', '/\1', + $request->get('REQUEST_URI'), + 1)); + exit; + } + + $request->setArg('pagename', deduce_pagename($request)); + global $pagename; // FIXME: can we make this non-global? + $pagename = $request->getArg('pagename'); + + $action = $request->getArg('action'); + if (!$action) + $action = 'browse'; + + global $user; // FIXME: can we make this non-global? + $user = new WikiUser($request, get_auth_mode($action)); + //FIXME: + //if ($user->is_authenticated()) + // $LogEntry->user = $user->id(); + + // All requests require the database + global $dbi; // FIXME: can we keep this non-global? 
+ $dbi = WikiDB::open($GLOBALS['DBParams']); + + if ( $action == 'browse' && $request->getArg('pagename') == gettext("HomePage") ) { + // if there is no HomePage, create a basic set of Wiki pages + if ( ! $dbi->isWikiPage(gettext("HomePage")) ) { + include_once("lib/loadsave.php"); + SetupWiki($dbi); + ExitWiki(); + } + } + + // FIXME: I think this is redundant. + if (!is_safe_action($action)) + $user->must_be_admin($action); + + if (isset($DisabledActions) && in_array($action, $DisabledActions)) + ExitWiki(sprintf(gettext("Action %s is disabled in this wiki."), $action)); -$user = new WikiUser(get_auth_mode($action)); -if ($user->is_authenticated()) - $LogEntry->user = $user->id(); - - - -// All requests require the database -$dbi = OpenDataBase($WikiPageStore); + // Enable the output of most of the warning messages. + // The warnings will screw up zip files and setpref though. + if ($action != 'zip' && $action != 'setprefs') { + global $ErrorManager; + $ErrorManager->setPostponedErrorMask(E_NOTICE|E_USER_NOTICE); + } -if ( $action == 'browse' && $pagename == gettext("HomePage") ) { - // if there is no HomePage, create a basic set of Wiki pages - if ( ! IsWikiPage($dbi, gettext("HomePage")) ) { - include_once("lib/loadsave.php"); - SetupWiki($dbi); - ExitWiki(); - } -} - -// FIXME: I think this is redundant. -if (!IsSafeAction($action)) - $user->must_be_admin($action); -if (isset($DisabledActions) && in_array($action, $DisabledActions)) - ExitWiki(gettext("Action $action is disabled in this wiki.")); - -// Enable the output of most of the warning messages. -// The warnings will screw up zip files and setpref though. 
-if ($action != 'zip' && $action != 'setprefs') - PostponeErrorMessages(E_NOTICE); - -switch ($action) { - case 'edit': - include "lib/editpage.php"; - break; - - case 'search': - if (isset($searchtype) && ($searchtype == 'full')) { - include "lib/fullsearch.php"; - } - else { - include "lib/search.php"; - } - break; + + switch ($action) { + case 'edit': + $request->compress_output(); + include "lib/editpage.php"; + editPage($dbi, $request); + break; + + case 'search': + $request->compress_output(); + if ($request->getArg('searchtype') == 'full') { + include "lib/fullsearch.php"; + } + else { + include "lib/search.php"; + } + break; + + case 'save': + $request->compress_output(); + include "lib/savepage.php"; + savePage($dbi, $request); + break; + case 'info': + $request->compress_output(); + include "lib/pageinfo.php"; + break; + case 'diff': + $request->compress_output(); + include_once "lib/diff.php"; + showDiff($dbi, $request); + break; - case 'save': - include "lib/savepage.php"; - break; - case 'info': - include "lib/pageinfo.php"; - break; - case 'diff': - include "lib/diff.php"; - break; + case 'zip': + include_once("lib/loadsave.php"); + MakeWikiZip($dbi, $request); + // I don't think it hurts to add cruft at the end of the zip file. + echo "\n========================================================\n"; + echo "PhpWiki " . PHPWIKI_VERSION . " source:\n$GLOBALS[RCS_IDS]\n"; + break; + + case 'xmldump': + // FIXME: + $limit = 1; + if ($request->getArg('include') == 'all') + $limit = 0; + require_once("lib/libxml.php"); + $xmlwriter = new WikiXmlWriter; + $xmlwriter->begin(); + $xmlwriter->writeComment("PhpWiki " . PHPWIKI_VERSION . " source:\n$RCS_IDS\n"); + $xmlwriter->writeDatabase($dbi, $limit); + $xmlwriter->end(); + break; - case 'zip': - include_once("lib/loadsave.php"); - MakeWikiZip($dbi, isset($include) && $include == 'all'); - // I don't think it hurts to add cruft at the end of the zip file. 
- echo "\n========================================================\n"; - echo "PhpWiki " . PHPWIKI_VERSION . " source:\n$RCS_IDS\n"; - break; - - case 'upload': - include_once("lib/loadsave.php"); - LoadPostFile($dbi, 'file'); - break; + case 'upload': + include_once("lib/loadsave.php"); + LoadPostFile($dbi, $request); + break; - case 'dumpserial': - if (empty($directory)) - ExitWiki(gettext("You must specify a directory to dump to")); - - include_once("lib/loadsave.php"); - DumpToDir($dbi, fix_magic_quotes_gpc($directory)); - break; - - case 'loadfile': - if (empty($source)) - ExitWiki(gettext("You must specify a source to read from")); - - include_once("lib/loadsave.php"); - LoadFileOrDir($dbi, fix_magic_quotes_gpc($source)); - break; - - case 'remove': - include 'admin/removepage.php'; - break; + case 'dumpserial': + include_once("lib/loadsave.php"); + DumpToDir($dbi, $request); + break; + + case 'loadfile': + include_once("lib/loadsave.php"); + LoadFileOrDir($dbi, $request); + break; + + case 'remove': + include 'admin/removepage.php'; + break; - case 'lock': - case 'unlock': - include "admin/lockpage.php"; - include "lib/display.php"; - break; - - case 'setprefs': - $prefs = $user->getPreferences($GLOBALS); - if (!empty($edit_area_width)) - $prefs['edit_area.width'] = $edit_area_width; - if (!empty($edit_area_height)) - $prefs['edit_area.height'] = $edit_area_height; - $user->setPreferences($prefs); - - PostponeErrorMessages(E_ALL & ~E_NOTICE); - - include "lib/display.php"; - break; + case 'lock': + case 'unlock': + $user->must_be_admin("lock or unlock pages"); + $page = $dbi->getPage($request->getArg('pagename')); + $page->set('locked', $action == 'lock'); + + $request->compress_output(); + include_once("lib/display.php"); + displayPage($dbi, $request); + break; + + case 'setprefs': + $prefs = $user->getPreferences(); + $edit_area_width = $request->getArg('edit_area_width'); + $edit_area_height = $request->getArg('edit_area_height'); + if ($edit_area_width) 
+ $prefs['edit_area.width'] = $edit_area_width; + if ($edit_area_height) + $prefs['edit_area.height'] = $edit_area_height; + $user->setPreferences($prefs); + $ErrorManager->setPostponedErrorMask(E_ALL & ~E_NOTICE); + + $request->compress_output(); + include_once("lib/display.php"); + displayPage($dbi, $request); + break; - case 'browse': - case 'login': - case 'logout': - include "lib/display.php"; - break; - - default: - echo QElement('p', sprintf("Bad action: '%s'", urlencode($action))); - break; + case 'browse': + case 'login': + case 'logout': + $request->compress_output(); + include_once("lib/display.php"); + displayPage($dbi, $request); + break; + + default: + echo QElement('p', sprintf("Bad action: '%s'", urlencode($action))); + break; + } + ExitWiki(); } -ExitWiki(); +$request = new Request; +main($request); + -// For emacs users // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/msql.php b/lib/msql.php deleted file mode 100644 index c6bb5e800..000000000 --- a/lib/msql.php +++ /dev/null @@ -1,531 +0,0 @@ -"; - $msg .= sprintf(gettext ("Error message: %s"), msql_error()); - ExitWiki($msg); - } - if (!msql_select_db($database, $dbc)) { - $msg = sprintf(gettext ("Cannot open database %s, giving up."), - $database); - $msg .= "
    "; - $msg .= sprintf(gettext ("Error message: %s"), msql_error()); - ExitWiki($msg); - } - - $dbi['dbc'] = $dbc; - $dbi['table'] = $dbinfo['table']; // page metadata - $dbi['page_table'] = $dbinfo['page_table']; // page content - return $dbi; - } - - - function CloseDataBase($dbi) { - // I found msql_pconnect unstable so we go the slow route. - return msql_close($dbi['dbc']); - } - - - // This should receive the full text of the page in one string - // It will break the page text into an array of strings - // of length MSQL_MAX_LINE_LENGTH which should match the length - // of the columns wikipages.LINE, archivepages.LINE in schema.minisql - - function msqlDecomposeString($string) { - $ret_arr = array(); - - // initialize the array to satisfy E_NOTICE - for ($i = 0; $i < MSQL_MAX_LINE_LENGTH; $i++) { - $ret_arr[$i] = ""; - } - $el = 0; - - // zero, one, infinity - // account for the small case - if (strlen($string) < MSQL_MAX_LINE_LENGTH) { - $ret_arr[$el] = $string; - return $ret_arr; - } - - $words = array(); - $line = $string2 = ""; - - // split on single spaces - $words = preg_split("/ /", $string); - $num_words = count($words); - - reset($words); - $ret_arr[0] = $words[0]; - $line = " $words[1]"; - - // for all words, build up lines < MSQL_MAX_LINE_LENGTH in $ret_arr - for ($x = 2; $x < $num_words; $x++) { - $length = strlen($line) + strlen($words[$x]) - + strlen($ret_arr[$el]) + 1; - - if ($length < MSQL_MAX_LINE_LENGTH) { - $line .= " " . 
$words[$x]; - } else { - // put this line in the return array, reset, continue - $ret_arr[$el++] .= $line; - $line = " $words[$x]"; // reset - } - } - $ret_arr[$el] = $line; - return $ret_arr; - } - - - // Take form data and prepare it for the db - function MakeDBHash($pagename, $pagehash) - { - $pagehash["pagename"] = addslashes($pagename); - if (!isset($pagehash["flags"])) - $pagehash["flags"] = 0; - if (!isset($pagehash["content"])) { - $pagehash["content"] = array(); - } else { - $pagehash["content"] = implode("\n", $pagehash["content"]); - $pagehash["content"] = msqlDecomposeString($pagehash["content"]); - } - $pagehash["author"] = addslashes($pagehash["author"]); - if (empty($pagehash["refs"])) { - $pagehash["refs"] = ""; - } else { - $pagehash["refs"] = serialize($pagehash["refs"]); - } - - return $pagehash; - } - - - // Take db data and prepare it for display - function MakePageHash($dbhash) - { - // unserialize/explode content - $dbhash['refs'] = unserialize($dbhash['refs']); - return $dbhash; - } - - - // Return hash of page + attributes or default - function RetrievePage($dbi, $pagename, $pagestore) { - $pagename = addslashes($pagename); - $table = $pagestore['table']; - $pagetable = $pagestore['page_table']; - - $query = "select * from $table where pagename='$pagename'"; - // echo "

    query: $query

    "; - $res = msql_query($query, $dbi['dbc']); - if (msql_num_rows($res)) { - $dbhash = msql_fetch_array($res); - - $query = "select lineno,line from $pagetable " . - "where pagename='$pagename' " . - "order by lineno"; - - $msql_content = ""; - if ($res = msql_query($query, $dbi['dbc'])) { - $dbhash["content"] = array(); - while ($row = msql_fetch_array($res)) { - $msql_content .= $row["line"]; - } - $dbhash["content"] = explode("\n", $msql_content); - } - - return MakePageHash($dbhash); - } - return -1; - } - - - // Either insert or replace a key/value (a page) - function InsertPage($dbi, $pagename, $pagehash) { - - $pagehash = MakeDBHash($pagename, $pagehash); - // $pagehash["content"] is now an array of strings - // of MSQL_MAX_LINE_LENGTH - - // record the time of modification - $pagehash["lastmodified"] = time(); - - if (IsWikiPage($dbi, $pagename)) { - - $PAIRS = "author='$pagehash[author]'," . - "created=$pagehash[created]," . - "flags=$pagehash[flags]," . - "lastmodified=$pagehash[lastmodified]," . - "pagename='$pagehash[pagename]'," . - "refs='$pagehash[refs]'," . - "version=$pagehash[version]"; - - $query = "UPDATE $dbi[table] SET $PAIRS WHERE pagename='$pagename'"; - - } else { - // do an insert - // build up the column names and values for the query - - $COLUMNS = "author, created, flags, lastmodified, " . - "pagename, refs, version"; - - $VALUES = "'$pagehash[author]', " . - "$pagehash[created], $pagehash[flags], " . - "$pagehash[lastmodified], '$pagehash[pagename]', " . - "'$pagehash[refs]', $pagehash[version]"; - - - $query = "INSERT INTO $dbi[table] ($COLUMNS) VALUES($VALUES)"; - } - - // echo "

    Query: $query

    \n"; - - // first, insert the metadata - $retval = msql_query($query, $dbi['dbc']); - if ($retval == false) { - printf(gettext ("Insert/update failed: %s"), msql_error()); - print "
    \n"; - } - - - // second, insert the page data - // remove old data from page_table - $query = "delete from $dbi[page_table] where pagename='$pagename'"; - // echo "Delete query: $query
    \n"; - $retval = msql_query($query, $dbi['dbc']); - if ($retval == false) { - printf(gettext ("Delete on %s failed: %s"), $dbi[page_table], - msql_error()); - print "
    \n"; - } - - // insert the new lines - reset($pagehash["content"]); - - for ($x = 0; $x < count($pagehash["content"]); $x++) { - $line = addslashes($pagehash["content"][$x]); - $query = "INSERT INTO $dbi[page_table] " . - "(pagename, lineno, line) " . - "VALUES('$pagename', $x, '$line')"; - // echo "Page line insert query: $query
    \n"; - $retval = msql_query($query, $dbi['dbc']); - if ($retval == false) { - printf(gettext ("Insert into %s failed: %s"), $dbi[page_table], - msql_error()); - print "
    \n"; - } - } - } - - - // for archiving pages to a separate table - function SaveCopyToArchive($dbi, $pagename, $pagehash) { - global $ArchivePageStore; - - $pagehash = MakeDBHash($pagename, $pagehash); - // $pagehash["content"] is now an array of strings - // of MSQL_MAX_LINE_LENGTH - - if (IsInArchive($dbi, $pagename)) { - - $PAIRS = "author='$pagehash[author]'," . - "created=$pagehash[created]," . - "flags=$pagehash[flags]," . - "lastmodified=$pagehash[lastmodified]," . - "pagename='$pagehash[pagename]'," . - "refs='$pagehash[refs]'," . - "version=$pagehash[version]"; - - $query = "UPDATE $ArchivePageStore[table] SET $PAIRS WHERE pagename='$pagename'"; - - } else { - // do an insert - // build up the column names and values for the query - - $COLUMNS = "author, created, flags, lastmodified, " . - "pagename, refs, version"; - - $VALUES = "'$pagehash[author]', " . - "$pagehash[created], $pagehash[flags], " . - "$pagehash[lastmodified], '$pagehash[pagename]', " . - "'$pagehash[refs]', $pagehash[version]"; - - - $query = "INSERT INTO archive ($COLUMNS) VALUES($VALUES)"; - } - - // echo "

    Query: $query

    \n"; - - // first, insert the metadata - $retval = msql_query($query, $dbi['dbc']); - if ($retval == false) { - printf(gettext ("Insert/update failed: %s"), msql_error()); - print "
    \n"; - } - - // second, insert the page data - // remove old data from page_table - $query = "delete from $ArchivePageStore[page_table] where pagename='$pagename'"; - // echo "Delete query: $query
    \n"; - $retval = msql_query($query, $dbi['dbc']); - if ($retval == false) { - printf(gettext ("Delete on %s failed: %s"), - $ArchivePageStore[page_table], msql_error()); - print "
    \n"; - } - - // insert the new lines - reset($pagehash["content"]); - - for ($x = 0; $x < count($pagehash["content"]); $x++) { - $line = addslashes($pagehash["content"][$x]); - $query = "INSERT INTO $ArchivePageStore[page_table] " . - "(pagename, lineno, line) " . - "VALUES('$pagename', $x, '$line')"; - // echo "Page line insert query: $query
    \n"; - $retval = msql_query($query, $dbi['dbc']); - if ($retval == false) { - printf(gettext ("Insert into %s failed: %s"), - $ArchivePageStore[page_table], msql_error()); - print "
    \n"; - } - } - - - } - - - function IsWikiPage($dbi, $pagename) { - $pagename = addslashes($pagename); - $query = "select pagename from wiki where pagename='$pagename'"; - // echo "Query: $query
    \n"; - if ($res = msql_query($query, $dbi['dbc'])) { - return(msql_affected_rows($res)); - } - } - - - function IsInArchive($dbi, $pagename) { - $pagename = addslashes($pagename); - $query = "select pagename from archive where pagename='$pagename'"; - // echo "Query: $query
    \n"; - if ($res = msql_query($query, $dbi['dbc'])) { - return(msql_affected_rows($res)); - } - } - - - - // setup for title-search - function InitTitleSearch($dbi, $search) { - $search = addslashes($search); - $query = "select pagename from $dbi[table] " . - "where pagename clike '%$search%' order by pagename"; - $res = msql_query($query, $dbi['dbc']); - - return $res; - } - - - // iterating through database - function TitleSearchNextMatch($dbi, $res) { - if($o = msql_fetch_object($res)) { - return $o->pagename; - } - else { - return 0; - } - } - - - // setup for full-text search - function InitFullSearch($dbi, $search) { - // select unique page names from wikipages, and then - // retrieve all pages that come back. - $search = addslashes($search); - $query = "select distinct pagename from $dbi[page_table] " . - "where line clike '%$search%' " . - "order by pagename"; - $res = msql_query($query, $dbi['dbc']); - - return $res; - } - - // iterating through database - function FullSearchNextMatch($dbi, $res) { - global $WikiPageStore; - if ($row = msql_fetch_row($res)) { - return RetrievePage($dbi, $row[0], $WikiPageStore); - } else { - return 0; - } - } - - //////////////////////// - // new database features - - - function IncreaseHitCount($dbi, $pagename) { - - $query = "select hits from hitcount where pagename='$pagename'"; - $res = msql_query($query, $dbi['dbc']); - if (msql_num_rows($res)) { - $hits = msql_result($res, 0, 'hits'); - $hits++; - $query = "update hitcount set hits=$hits where pagename='$pagename'"; - $res = msql_query($query, $dbi['dbc']); - - } else { - $query = "insert into hitcount (pagename, hits) " . 
- "values ('$pagename', 1)"; - $res = msql_query($query, $dbi['dbc']); - } - - return $res; - } - - function GetHitCount($dbi, $pagename) { - - $query = "select hits from hitcount where pagename='$pagename'"; - $res = msql_query($query, $dbi['dbc']); - if (msql_num_rows($res)) { - $hits = msql_result($res, 0, 'hits'); - } else { - $hits = "0"; - } - - return $hits; - } - - - - function InitMostPopular($dbi, $limit) { - - $query = "select * from hitcount " . - "order by hits desc, pagename limit $limit"; - - $res = msql_query($query, $dbi['dbc']); - - return $res; - } - - function MostPopularNextMatch($dbi, $res) { - - if ($hits = msql_fetch_array($res)) { - return $hits; - } else { - return 0; - } - } - - function GetAllWikiPageNames($dbi_) { - $res = msql_query("select pagename from wiki", $dbi['dbc']); - $rows = msql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $pages[$i] = msql_result($res, $i, 'pagename'); - } - return $pages; - } - - //////////////////////////////////////// - // functionality for the wikilinks table - - // takes a page name, returns array of scored incoming and outgoing links - -/* Not implemented yet. The code below was copied from mysql.php... 
- - function GetWikiPageLinks($dbi, $pagename) { - $links = array(); - $pagename = addslashes($pagename); - $res = msql_query("select wikilinks.topage, wikiscore.score from wikilinks, wikiscore where wikilinks.topage=wikiscore.pagename and wikilinks.frompage='$pagename' order by score desc, topage", $dbi['dbc']); - - $rows = msql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $out = msql_fetch_array($res); - $links['out'][] = array($out['topage'], $out['score']); - } - - $res = msql_query("select wikilinks.frompage, wikiscore.score from wikilinks, wikiscore where wikilinks.frompage=wikiscore.pagename and wikilinks.topage='$pagename' order by score desc, frompage", $dbi['dbc']); - $rows = msql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $out = msql_fetch_array($res); - $links['in'][] = array($out['frompage'], $out['score']); - } - - $res = msql_query("select distinct hitcount.pagename, hitcount.hits from wikilinks, hitcount where (wikilinks.frompage=hitcounts.pagename and wikilinks.topage='$pagename') or (wikilinks.topage=pagename and wikilinks.frompage='$pagename') order by hitcount.hits desc, wikilinks.pagename", $dbi['dbc']); - $rows = msql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $out = msql_fetch_array($res); - $links['popular'][] = array($out['pagename'], $out['hits']); - } - - return $links; - } - - - // takes page name, list of links it contains - // the $linklist is an array where the keys are the page names - function SetWikiPageLinks($dbi, $pagename, $linklist) { - $frompage = addslashes($pagename); - - // first delete the old list of links - msql_query("delete from wikilinks where frompage='$frompage'", - $dbi["dbc"]); - - // the page may not have links, return if not - if (! count($linklist)) - return; - // now insert the new list of links - while (list($topage, $count) = each($linklist)) { - $topage = addslashes($topage); - if($topage != $frompage) { - msql_query("insert into wikilinks (frompage, topage) " . 
- "values ('$frompage', '$topage')", $dbi["dbc"]); - } - } - - msql_query("delete from wikiscore", $dbi["dbc"]); - msql_query("insert into wikiscore select w1.topage, count(*) from wikilinks as w1, wikilinks as w2 where w2.topage=w1.frompage group by w1.topage", $dbi["dbc"]); - } -*/ - -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// End: -?> diff --git a/lib/mysql.php b/lib/mysql.php deleted file mode 100644 index ff1d497cd..000000000 --- a/lib/mysql.php +++ /dev/null @@ -1,447 +0,0 @@ -"; - $msg .= sprintf(gettext ("MySQL error: %s"), mysql_error()); - ExitWiki($msg); - } - if (!mysql_select_db($database, $dbc)) { - $msg = sprintf(gettext ("Cannot open database %s, giving up."), $database); - $msg .= "
    "; - $msg .= sprintf(gettext ("MySQL error: %s"), mysql_error()); - ExitWiki($msg); - } - $dbi['dbc'] = $dbc; - $dbi['table'] = $dbname; - return $dbi; - } - - - function CloseDataBase($dbi) { - // NOP function - // mysql connections are established as persistant - // they cannot be closed through mysql_close() - } - - - // prepare $pagehash for storing in mysql - function MakeDBHash($pagename, $pagehash) - { - $pagehash["pagename"] = addslashes($pagename); - if (!isset($pagehash["flags"])) - $pagehash["flags"] = 0; - $pagehash["author"] = addslashes($pagehash["author"]); - $pagehash["content"] = implode("\n", $pagehash["content"]); - $pagehash["content"] = addslashes($pagehash["content"]); - if (!isset($pagehash["refs"])) - $pagehash["refs"] = array(); - $pagehash["refs"] = serialize($pagehash["refs"]); - - return $pagehash; - } - - - // convert mysql result $dbhash to $pagehash - function MakePageHash($dbhash) - { - // unserialize/explode content - $dbhash['refs'] = unserialize($dbhash['refs']); - $dbhash['content'] = explode("\n", $dbhash['content']); - return $dbhash; - } - - - // Return hash of page + attributes or default - function RetrievePage($dbi, $pagename, $pagestore, $version) { - $pagename = addslashes($pagename); - - $version = $version ? 
" and version=$version" : ''; - - if ($res = mysql_query("select * from $pagestore where pagename='$pagename'$version", $dbi['dbc'])) { - if ($dbhash = mysql_fetch_array($res)) { - return MakePageHash($dbhash); - } - } - - // if we reach this the query failed - return -1; - } - - - // Return all versions of a page as an array of page hashes - function RetrievePageVersions($dbi, $pagename, $curstore, $archstore) { - $pagename = addslashes($pagename); - if (($page[0] = RetrievePage($dbi, $pagename, $curstore, 0)) != -1) { - if ($res = mysql_query("select * from $archstore where pagename='$pagename' order by version desc", $dbi['dbc'])) { - while ($dbhash = mysql_fetch_array($res)) { - array_push($page, MakePageHash($dbhash)); - } - return $page; - } - } - return -1; - } - - - // Get maximum version number of a page in pagestore - function GetMaxVersionNumber($dbi, $pagename, $pagestore) { - $pagename = addslashes($pagename); - if ($res = mysql_query("select max(version) from $pagestore where pagename='$pagename'", $dbi['dbc'])) { - return mysql_result($res, 0); - } - return -1; - } - - - // Either insert or replace a key/value (a page) - function InsertPage($dbi, $pagename, $pagehash, $clobber) - { - $pagehash = MakeDBHash($pagename, $pagehash); - - $COLUMNS = "author, content, created, flags, " . - "lastmodified, pagename, refs, version"; - - $VALUES = "'$pagehash[author]', '$pagehash[content]', " . - "$pagehash[created], $pagehash[flags], " . - "$pagehash[lastmodified], '$pagehash[pagename]', " . - "'$pagehash[refs]', $pagehash[version]"; - - // Clobber existing page? - $clobber = $clobber ? 'replace' : 'insert'; - - if (!mysql_query("$clobber into $dbi[table] ($COLUMNS) values ($VALUES)", - $dbi['dbc'])) { - $msg = htmlspecialchars(sprintf(gettext ("Error writing page '%s'"), $pagename)); - $msg .= "
    "; - $msg .= htmlspecialchars(sprintf(gettext ("MySQL error: %s"), mysql_error())); - ExitWiki($msg); - } - } - - - // Adds to or replaces a page in the current pagestore - function ReplaceCurrentPage($pagename, $pagehash) { - global $WikiPageStore; - $dbi = OpenDataBase($WikiPageStore); - $linklist = ExtractWikiPageLinks($pagehash['content']); - SetWikiPageLinks($dbi, $pagename, $linklist); - InsertPage($dbi, $pagename, $pagehash, true); - } - - - // Adds a page to the archive pagestore - function SavePageToArchive($pagename, $pagehash) { - global $ArchivePageStore; - $dbi = OpenDataBase($ArchivePageStore); - InsertPage($dbi, $pagename, $pagehash, false); - } - - - // Returns store where version of page resides - function SelectStore($dbi, $pagename, $version, $curstore, $archstore) { - if ($version) { - if (IsVersionInWiki($dbi, $pagename, $version)) return $curstore; - elseif (IsVersionInArchive($dbi, $pagename, $version)) return $archstore; - else return -1; - } - elseif (IsWikiPage($dbi, $pagename)) return $curstore; - else return -1; - } - - - function IsVersionInWiki($dbi, $pagename, $version) { - $pagename = addslashes($pagename); - if ($res = mysql_query("select count(*) from $dbi[table] where pagename='$pagename' and version='$version'", $dbi['dbc'])) { - return mysql_result($res, 0); - } - return 0; - } - - function IsVersionInArchive($dbi, $pagename, $version) { - global $ArchivePageStore; - - $pagename = addslashes($pagename); - if ($res = mysql_query("select count(*) from $ArchivePageStore where pagename='$pagename' and version='$version'", $dbi['dbc'])) { - return mysql_result($res, 0); - } - return 0; - } - - - function IsWikiPage($dbi, $pagename) { - $pagename = addslashes($pagename); - if ($res = mysql_query("select count(*) from $dbi[table] where pagename='$pagename'", $dbi['dbc'])) { - return mysql_result($res, 0); - } - return 0; - } - - function IsInArchive($dbi, $pagename) { - global $ArchivePageStore; - - $pagename = 
addslashes($pagename); - if ($res = mysql_query("select count(*) from $ArchivePageStore where pagename='$pagename'", $dbi['dbc'])) { - return mysql_result($res, 0); - } - return 0; - } - - - function RemovePage($dbi, $pagename) { - global $WikiPageStore, $ArchivePageStore; - global $WikiLinksStore, $HitCountStore, $WikiScoreStore; - - $pagename = addslashes($pagename); - $msg = gettext ("Cannot delete '%s' from table '%s'"); - $msg .= "
    \n"; - $msg .= gettext ("MySQL error: %s"); - - if (!mysql_query("delete from $WikiPageStore where pagename='$pagename'", $dbi['dbc'])) - ExitWiki(sprintf($msg, $pagename, $WikiPageStore, mysql_error())); - - if (!mysql_query("delete from $ArchivePageStore where pagename='$pagename'", $dbi['dbc'])) - ExitWiki(sprintf($msg, $pagename, $ArchivePageStore, mysql_error())); - - if (!mysql_query("delete from $WikiLinksStore where frompage='$pagename'", $dbi['dbc'])) - ExitWiki(sprintf($msg, $pagename, $WikiLinksStore, mysql_error())); - - if (!mysql_query("delete from $HitCountStore where pagename='$pagename'", $dbi['dbc'])) - ExitWiki(sprintf($msg, $pagename, $HitCountStore, mysql_error())); - - if (!mysql_query("delete from $WikiScoreStore where pagename='$pagename'", $dbi['dbc'])) - ExitWiki(sprintf($msg, $pagename, $WikiScoreStore, mysql_error())); - } - - - function IncreaseHitCount($dbi, $pagename) - { - global $HitCountStore; - - $res = mysql_query("update $HitCountStore set hits=hits+1 where pagename='$pagename'", $dbi['dbc']); - - if (!mysql_affected_rows($dbi['dbc'])) { - $res = mysql_query("insert into $HitCountStore (pagename, hits) values ('$pagename', 1)", $dbi['dbc']); - } - - return $res; - } - - function GetHitCount($dbi, $pagename) - { - global $HitCountStore; - - $res = mysql_query("select hits from $HitCountStore where pagename='$pagename'", $dbi['dbc']); - if (mysql_num_rows($res)) - $hits = mysql_result($res, 0); - else - $hits = "0"; - - return $hits; - } - - function MakeSQLSearchClause($search, $column) - { - $search = addslashes(preg_replace("/\s+/", " ", $search)); - $term = strtok($search, ' '); - $clause = ''; - while($term) { - $word = "$term"; - if ($word[0] == '-') { - $word = substr($word, 1); - $clause .= "not ($column like '%$word%') "; - } else { - $clause .= "($column like '%$word%') "; - } - if ($term = strtok(' ')) - $clause .= 'and '; - } - return $clause; - } - - // setup for title-search - function InitTitleSearch($dbi, 
$search) { - $clause = MakeSQLSearchClause($search, 'pagename'); - $res = mysql_query("select pagename from $dbi[table] where $clause order by pagename", $dbi["dbc"]); - - return $res; - } - - - // iterating through database - function TitleSearchNextMatch($dbi, $res) { - if($o = mysql_fetch_object($res)) { - return $o->pagename; - } - else { - return 0; - } - } - - - // setup for full-text search - function InitFullSearch($dbi, $search) { - $clause = MakeSQLSearchClause($search, 'content'); - $res = mysql_query("select * from $dbi[table] where $clause", $dbi["dbc"]); - - return $res; - } - - // iterating through database - function FullSearchNextMatch($dbi, $res) { - if($hash = mysql_fetch_array($res)) { - return MakePageHash($hash); - } - else { - return 0; - } - } - - function InitMostPopular($dbi, $limit) { - global $HitCountStore; - $res = mysql_query("select * from $HitCountStore order by hits desc, pagename limit $limit", $dbi["dbc"]); - - return $res; - } - - function MostPopularNextMatch($dbi, $res) { - if ($hits = mysql_fetch_array($res)) - return $hits; - else - return 0; - } - - function GetAllWikiPageNames($dbi) { - global $WikiPageStore; - $res = mysql_query("select pagename from $WikiPageStore", $dbi["dbc"]); - $rows = mysql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $pages[$i] = mysql_result($res, $i); - } - return $pages; - } - - - //////////////////////////////////////// - // functionality for the wikilinks table - - // takes a page name, returns array of scored incoming and outgoing links - function GetWikiPageLinks($dbi, $pagename) { - global $WikiLinksStore, $WikiScoreStore, $HitCountStore; - $links = array(); - - $pagename = addslashes($pagename); - $res = mysql_query("select topage, score from $WikiLinksStore, $WikiScoreStore where topage=pagename and frompage='$pagename' order by score desc, topage"); - $rows = mysql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $out = mysql_fetch_array($res); - $links['out'][] = 
array($out['topage'], $out['score']); - } - - $res = mysql_query("select frompage, score from $WikiLinksStore, $WikiScoreStore where frompage=pagename and topage='$pagename' order by score desc, frompage"); - $rows = mysql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $out = mysql_fetch_array($res); - $links['in'][] = array($out['frompage'], $out['score']); - } - - $res = mysql_query("select distinct pagename, hits from $WikiLinksStore, $HitCountStore where (frompage=pagename and topage='$pagename') or (topage=pagename and frompage='$pagename') order by hits desc, pagename"); - $rows = mysql_num_rows($res); - for ($i = 0; $i < $rows; $i++) { - $out = mysql_fetch_array($res); - $links['popular'][] = array($out['pagename'], $out['hits']); - } - - return $links; - } - - - // takes page name, list of links it contains - // the $linklist is an array where the keys are the page names - function SetWikiPageLinks($dbi, $pagename, $linklist) { - global $WikiLinksStore, $WikiScoreStore; - - $frompage = addslashes($pagename); - - // first delete the old list of links - mysql_query("delete from $WikiLinksStore where frompage='$frompage'", - $dbi["dbc"]); - - // the page may not have links, return if not - if (! count($linklist)) - return; - // now insert the new list of links - while (list($topage, $count) = each($linklist)) { - $topage = addslashes($topage); - if($topage != $frompage) { - mysql_query("insert into $WikiLinksStore (frompage, topage) " . 
- "values ('$frompage', '$topage')", $dbi["dbc"]); - } - } - - // update pagescore - mysql_query("delete from $WikiScoreStore", $dbi["dbc"]); - mysql_query("insert into $WikiScoreStore select w1.topage, count(*) from $WikiLinksStore as w1, $WikiLinksStore as w2 where w2.topage=w1.frompage group by w1.topage", $dbi["dbc"]); - } - -/* more mysql queries: - -orphans: -select pagename from wiki left join wikilinks on pagename=topage where topage is NULL; -*/ - -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// End: -?> diff --git a/lib/pageinfo.php b/lib/pageinfo.php index b84afd550..46b383a9f 100644 --- a/lib/pageinfo.php +++ b/lib/pageinfo.php @@ -1,25 +1,95 @@ getArg('pagename'); +$page = $dbi->getPage($pagename); + +$rows[] = Element('tr', + "\n" + . Element('th', 'Version') . "\n" + . Element('th', 'Newer') . "\n" + . Element('th', 'Older') . "\n" + . Element('th', 'Created') . "\n" + . Element('th', 'Summary') . "\n" + . Element('th', 'Author') . "\n" + ); // Get all versions of a page, then iterate over them to make version list -$pages = RetrievePageVersions($dbi, $pagename, $WikiPageStore, $ArchivePageStore); +$iter = $page->getAllRevisions(); $i = 0; -foreach ($pages as $pagehash) { - $row = "\n" . Element('td', array('align' => 'right'), QElement('a', array('href' => "$pagename?version=" . $pagehash['version']), $pagehash['version'])); - $row .= "\n" . Element('td', array('align' => 'center'), QElement('input', array('type' => 'radio', 'name' => 'ver2', 'value' => ($i ? $pagehash['version'] : 0), 'checked' => ($i ? false : true)))); - $row .= "\n" . Element('td', array('align' => 'center'), QElement('input', array('type' => 'radio', 'name' => 'ver1', 'value' => ($i ? $pagehash['version'] : 0), 'checked' => ($i++-1 ? false : true)))); - $row .= "\n" . QElement('td', strftime($datetimeformat, $pagehash['lastmodified'])); - $row .= "\n" . QElement('td', $pagehash['author']) . "\n"; - - $html .= Element('tr', $row) . 
"\n"; +$last_author_id = false; + +function bold_if($cond, $text) { + return (bool)$cond ? QElement('b', $text) : htmlspecialchars($text); +} + + +while ($rev = $iter->next()) { + $version = $rev->getVersion(); + $cols = array(); + $is_major_edit = ! $rev->get('is_minor_edit'); + + $cols[] = Element('td', array('align' => 'right'), + Element('a', array('href' + => WikiURL($pagename, + array('version' => $version))), + bold_if($is_major_edit, $version))); + + + $cols[] = Element('td', array('align' => 'center'), + QElement('input', array('type' => 'radio', + 'name' => 'version', + 'value' => $version, + 'checked' => $i == 0))); + + $cols[] = Element('td', array('align' => 'center'), + QElement('input', array('type' => 'radio', + 'name' => 'previous', + 'value' => $version, + 'checked' => $i++ == 1))); + + $cols[] = QElement('td', array('align' => 'right'), + strftime($datetimeformat, $rev->get('mtime')) + . "\xa0"); + + + $cols[] = Element('td', bold_if($is_major_edit, $rev->get('summary'))); + + $author_id = $rev->get('author_id'); + $cols[] = Element('td', bold_if($author_id !== $last_author_id, + $rev->get('author'))); + $last_author_id = $author_id; + $rows[] = Element('tr', "\n" . join("\n", $cols) . "\n"); } -$html = "\n" . Element('table', $html) . "\n" . Element('input', array('type' => 'hidden', 'name' => 'action', 'value' => 'diff')) . "\n" . Element('input', array('type' => 'submit', 'value' => 'Run Diff')) . "\n"; -$html = Element('form', array('method' => 'get', 'action' => $pagename), $html); +$table = ("\n" + . Element('table', join("\n", $rows)) . "\n" + . Element('input', array('type' => 'hidden', + 'name' => 'action', + 'value' => 'diff')) . "\n" + . Element('input', array('type' => 'submit', 'value' => 'Run Diff')) . "\n"); + +$formargs['action'] = USE_PATH_INFO ? WikiURL($pagename) : SCRIPT_NAME; +$formargs['method'] = 'post'; + +$html = Element('p', + htmlspecialchars(gettext("Currently archived versions of")) + . " " + . 
LinkExistingWikiWord($pagename)); +$html .= Element('form', $formargs, $table); + +echo GeneratePage('MESSAGE', $html, gettext("Revision History: ") . $pagename); + -echo GeneratePage('MESSAGE', $html, gettext("PageInfo").": '$pagename'", 0); +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: ?> diff --git a/lib/pgsql.php b/lib/pgsql.php deleted file mode 100644 index fb75cefa0..000000000 --- a/lib/pgsql.php +++ /dev/null @@ -1,475 +0,0 @@ -dbi after open: '$dbi' '$dbi[table]' '$dbi[dbc]'

    \n"; - return $dbi; - } - - - function CloseDataBase($dbi) { - // NOOP: we use persistent database connections - } - - - // Return hash of page + attributes or default - function RetrievePage($dbi, $pagename, $pagestore, $version) { - $pagename = addslashes($pagename); - $version = $version ? " and version=$version" : ''; - $query = "select * from $pagestore where pagename='$pagename'$version"; - // echo "

    $query

    "; - $res = pg_exec($dbi['dbc'], $query); - - if (pg_numrows($res)) { - if ($array = pg_fetch_array($res, 0)) { - while (list($key, $val) = each($array)) { - // pg_fetch_array gives us all the values twice, - // so we have to manually edit out the indices - if (gettype($key) == "integer") { - continue; - } - $pagehash[$key] = $val; - } - - // unserialize/explode content - $pagehash['refs'] = unserialize($pagehash['refs']); - $pagehash['content'] = explode("\n", $pagehash['content']); - - return $pagehash; - } - } - - // if we reach this the query failed - return -1; - } - - - // Return all versions of a page as an array of page hashes - function RetrievePageVersions($dbi, $pagename, $curstore, $archstore) { - $pagename = addslashes($pagename); - if (($page[0] = RetrievePage($dbi, $pagename, $curstore, 0)) != -1) { - $res = pg_exec($dbi['dbc'], "select * from $archstore where pagename='$pagename' order by version desc"); - if (pg_numrows($res)) { - while ($array = pg_fetch_array($res, 0)) { - while (list($key, $val) = each($array)) { - if (gettype($key) == "integer") { - continue; - } - $dbhash[$key] = $val; - } - - $dbhash['refs'] = unserialize($dbhash['refs']); - $dbhash['content'] = explode("\n", $dbhash['content']); - - array_push($page, $dbhash); - } - - return $page; - } - } - - // if we reach this the query failed - return -1; - } - - - // Get maximum version number of a page in pagestore - function GetMaxVersionNumber($dbi, $pagename, $pagestore) { - $pagename = addslashes($pagename); - if ($res = pg_exec($dbi['dbc'], "select max(version) from $pagestore where pagename='$pagename'")) { - return pg_result($res, 0, "version"); - } - return -1; - } - - - // Either insert or replace a key/value (a page) - function InsertPage($dbi, $pagename, $pagehash, $clobber) { - $pagename = addslashes($pagename); - - // update the wikilinks table - $linklist = ExtractWikiPageLinks($pagehash['content']); - SetWikiPageLinks($dbi, $pagename, $linklist); - - - // prepare the 
content for storage - if (!isset($pagehash["pagename"])) - $pagehash["pagename"] = $pagename; - if (!isset($pagehash["flags"])) - $pagehash["flags"] = 0; - $pagehash["author"] = addslashes($pagehash["author"]); - $pagehash["content"] = implode("\n", $pagehash["content"]); - $pagehash["content"] = addslashes($pagehash["content"]); - $pagehash["pagename"] = addslashes($pagehash["pagename"]); - $pagehash["refs"] = serialize($pagehash["refs"]); - - // Check for empty variables which can cause a sql error - if(empty($pagehash["created"])) - $pagehash["created"] = time(); - if(empty($pagehash["version"])) - $pagehash["version"] = 1; - - // record the time of modification - $pagehash["lastmodified"] = time(); - - // Clobber existing page? - $clobber = $clobber ? 'replace' : 'insert'; - - $COLUMNS = "author, content, created, flags, " . - "lastmodified, pagename, refs, version"; - - $VALUES = "'$pagehash[author]', '$pagehash[content]', " . - "$pagehash[created], $pagehash[flags], " . - "$pagehash[lastmodified], '$pagehash[pagename]', " . - "'$pagehash[refs]', $pagehash[version]"; - - if (!pg_exec($dbi['dbc'], "$clobber into $dbi[table] ($COLUMNS) values ($VALUES)")) { - $msg = htmlspecialchars(sprintf(gettext("Error writing page '%s'"), $pagename)); - $msg .= "
    "; - $msg .= htmlspecialchars(sprintf(gettext("PostgreSQL error: %s"), pg_errormessage($dbi['dbc']))); - ExitWiki($msg); - } - } - - - // Adds a page to the archive pagestore - function SavePageToArchive($pagename, $pagehash) { - global $ArchivePageStore; - $dbi = OpenDataBase($ArchivePageStore); - InsertPage($dbi, $pagename, $pagehash, false); - } - - - // Returns store where version of page resides - function SelectStore($dbi, $pagename, $version, $curstore, $archstore) { - if ($version) { - if (IsVersionInWiki($dbi, $pagename, $version)) return $curstore; - elseif (IsVersionInArchive($dbi, $pagename, $version)) return $archstore; - else return -1; - } - elseif (IsWikiPage($dbi, $pagename)) return $curstore; - else return -1; - } - - - function IsVersionInWiki($dbi, $pagename, $version) { - $pagename = addslashes($pagename); - if ($res = pg_exec($dbi['dbc'], "select count(*) from $dbi[table] where pagename='$pagename' and version='$version'")) { - return pg_result($res, 0, "count"); - } - return 0; - } - - - function IsVersionInArchive($dbi, $pagename, $version) { - global $ArchivePageStore; - - $pagename = addslashes($pagename); - if ($res = pg_exec($dbi['dbc'], "select count(*) from $ArchivePageStore where pagename='$pagename' and version='$version'")) { - return pg_result($res, 0, "count"); - } - return 0; - } - - - function IsWikiPage($dbi, $pagename) { - $pagename = addslashes($pagename); - if ($res = pg_exec($dbi['dbc'], "select count(*) from $dbi[table] where pagename='$pagename'")) { - return pg_result($res, 0, "count"); - } - return 0; - } - - - function IsInArchive($dbi, $pagename) { - global $ArchivePageStore; - - $pagename = addslashes($pagename); - if ($res = pg_exec($dbi['dbc'], "select count(*) from $ArchivePageStore where pagename='$pagename'")) { - return pg_result($res, 0, "count"); - } - return 0; - } - - - function RemovePage($dbi, $pagename) { - global $WikiPageStore, $ArchivePageStore; - global $WikiLinksStore, $HitCountStore, 
$WikiScoreStore; - - $pagename = addslashes($pagename); - $msg = gettext ("Cannot delete '%s' from table '%s'"); - $msg .= "
    \n"; - $msg .= gettext ("PostgreSQL error: %s"); - - if (!pg_exec($dbi['dbc'], "delete from $WikiPageStore where pagename='$pagename'")) - ExitWiki(sprintf($msg, $pagename, $WikiPageStore, pg_errormessage())); - - if (!pg_exec($dbi['dbc'], "delete from $ArchivePageStore where pagename='$pagename'")) - ExitWiki(sprintf($msg, $pagename, $ArchivePageStore, pg_errormessage())); - - if (!pg_exec($dbi['dbc'], "delete from $WikiLinksStore where frompage='$pagename'")) - ExitWiki(sprintf($msg, $pagename, $WikiLinksStore, pg_errormessage())); - - if (!pg_exec($dbi['dbc'], "delete from $HitCountStore where pagename='$pagename'")) - ExitWiki(sprintf($msg, $pagename, $HitCountStore, pg_errormessage())); - - if (!pg_exec($dbi['dbc'], "delete from $WikiScoreStore where pagename='$pagename'")) - ExitWiki(sprintf($msg, $pagename, $WikiScoreStore, mysql_error())); - } - - - // setup for title-search - function InitTitleSearch($dbi, $search) { - - global $search_counter; - $search_counter = 0; - - $search = strtolower($search); - $search = addslashes($search); - $query = "select pagename from $dbi[table] where lower(pagename) " . - "like '%$search%' order by pagename"; - //echo "search query: $query
    \n"; - $res = pg_exec($dbi["dbc"], $query); - - return $res; - } - - - // iterating through database - function TitleSearchNextMatch($dbi, $res) { - global $search_counter; - if($o = @pg_fetch_object($res, $search_counter)) { - $search_counter++; - return $o->pagename; - } else { - return 0; - } - } - - - // setup for full-text search - function InitFullSearch($dbi, $search) { - global $search_counter; - $search_counter = 0; - $search = strtolower($search); - $search = addslashes($search); - $search = addslashes($search); - $query = "select pagename,content from $dbi[table] " . - "where lower(content) like '%$search%'"; - - $res = pg_exec($dbi["dbc"], $query); - - return $res; - } - - // iterating through database - function FullSearchNextMatch($dbi, $res) { - global $search_counter; - if ($hash = @pg_fetch_array($res, $search_counter)) { - $search_counter++; - $page['pagename'] = $hash["pagename"]; - $page['content'] = explode("\n", $hash["content"]); - return $page; - } - else { - return 0; - } - } - - - //////////////////////// - // new database features - - - function IncreaseHitCount($dbi, $pagename) { - global $HitCountPageStore; - $query = "update $HitCountPageStore set hits=hits+1 where pagename='$pagename'"; - $res = pg_exec($dbi['dbc'], $query); - - if (!pg_cmdtuples($res)) { - $query = "insert into $HitCountPageStore (pagename, hits) " . - "values ('$pagename', 1)"; - $res = pg_exec($dbi['dbc'], $query); - } - - return $res; - } - - function GetHitCount($dbi, $pagename) { - global $HitCountPageStore; - $query = "select hits from $HitCountPageStore where pagename='$pagename'"; - $res = pg_exec($dbi['dbc'], $query); - if (pg_cmdtuples($res)) { - $hits = pg_result($res, 0, "hits"); - } else { - $hits = "0"; - } - - return $hits; - } - - - - function InitMostPopular($dbi, $limit) { - - global $pg_most_pop_ctr, $HitCountPageStore; - $pg_most_pop_ctr = 0; - - $query = "select * from $HitCountPageStore " . 
- "order by hits desc, pagename limit $limit"; - $res = pg_exec($dbi['dbc'], $query); - return $res; - } - - function MostPopularNextMatch($dbi, $res) { - - global $pg_most_pop_ctr; - if ($hits = @pg_fetch_array($res, $pg_most_pop_ctr)) { - $pg_most_pop_ctr++; - return $hits; - } else { - return 0; - } - } - - function GetAllWikiPageNames($dbi) { - global $WikiPageStore; - $res = pg_exec($dbi['dbc'], "select pagename from $WikiPageStore"); - $rows = pg_numrows($res); - for ($i = 0; $i < $rows; $i++) { - $pages[$i] = pg_result($res, $i, "pagename"); - } - return $pages; - } - - //////////////////////////////////////// - // functionality for the wikilinks table - - // takes a page name, returns array of links - function GetWikiPageLinks($dbi, $pagename) { - global $WikiLinksPageStore; - $pagename = addslashes($pagename); - - $res = pg_exec("select topage, score from wikilinks, wikiscore where topage=pagename and frompage='$pagename' order by score desc, topage"); - $rows = pg_numrows($res); - for ($i = 0; $i < $rows; $i++) { - $out = pg_fetch_array($res, $i); - $links['out'][] = array($out['topage'], $out['score']); - } - - $res = pg_exec("select frompage, score from wikilinks, wikiscore where frompage=pagename and topage='$pagename' order by score desc, frompage"); - $rows = pg_numrows($res); - for ($i = 0; $i < $rows; $i++) { - $out = pg_fetch_array($res, $i); - $links['in'][] = array($out['frompage'], $out['score']); - } - - $res = pg_exec("select distinct pagename, hits from wikilinks, hitcount where (frompage=pagename and topage='$pagename') or (topage=pagename and frompage='$pagename') order by hits desc, pagename"); - $rows = pg_numrows($res); - for ($i = 0; $i < $rows; $i++) { - $out = pg_fetch_array($res, $i); - $links['popular'][] = array($out['pagename'], $out['hits']); - } - - return $links; - - } - - - // takes page name, list of links it contains - // the $linklist is an array where the keys are the page names - - function SetWikiPageLinks($dbi, 
$pagename, $linklist) { - global $WikiLinksPageStore; - $frompage = addslashes($pagename); - - // first delete the old list of links - $query = "delete from $WikiLinksPageStore where frompage='$frompage'"; - //echo "$query
    \n"; - $res = pg_exec($dbi['dbc'], $query); - - // the page may not have links, return if not - if (! count($linklist)) - return; - - // now insert the new list of links - reset($linklist); - while (list($topage, $count) = each($linklist)) { - $topage = addslashes($topage); - if ($topage != $frompage) { - $query = "insert into $WikiLinksPageStore (frompage, topage) " . - "values ('$frompage', '$topage')"; - //echo "$query
    \n"; - $res = pg_exec($dbi['dbc'], $query); - } - } - // update pagescore - pg_exec("delete from wikiscore"); - pg_exec("insert into wikiscore select w1.topage, count(*) from wikilinks as w1, wikilinks as w2 where w2.topage=w1.frompage group by w1.topage"); - - } - -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// tab-width: 4 -// indent-tabs-mode: nil -// End: -?> diff --git a/lib/prepend.php b/lib/prepend.php index 4de5da922..d24cff831 100644 --- a/lib/prepend.php +++ b/lib/prepend.php @@ -5,117 +5,52 @@ */ $RCS_IDS = ''; function rcs_id ($id) { $GLOBALS['RCS_IDS'] .= "$id\n"; } -rcs_id('$Id: prepend.php,v 1.3 2001-02-14 22:02:05 dairiki Exp $'); +rcs_id('$Id: prepend.php,v 1.4 2001-09-18 19:16:23 dairiki Exp $'); error_reporting(E_ALL); +require_once('lib/ErrorManager.php'); -define ('FATAL_ERRORS', - E_ERROR | E_PARSE | E_CORE_ERROR | E_COMPILE_ERROR | E_USER_ERROR); -define ('WARNING_ERRORS', - E_WARNING | E_CORE_WARNING | E_COMPILE_WARNING | E_USER_WARNING); -define ('NOTICE_ERRORS', E_NOTICE | E_USER_NOTICE); - -$PostponedErrorMask = 0; -$PostponedErrors = array(); - -function PostponeErrorMessages ($newmask = -1) -{ - global $PostponedErrorMask, $PostponedErrors; - - if ($newmask < 0) - return $PostponedErrorMask; - - $oldmask = $PostponedErrorMask; - $PostponedErrorMask = $newmask; - - $i = 0; - while ($i < sizeof($PostponedErrors)) - { - list ($errno, $message) = $PostponedErrors[$i]; - if (($errno & $newmask) == 0) - { - echo $message; - array_splice($PostponedErrors, $i, 1); - } - else - $i++; - } - - return $oldmask; -} - +// FIXME: make this part of Request? function ExitWiki($errormsg = false) { - static $exitwiki = 0; - global $dbi; - - if($exitwiki) // just in case CloseDataBase calls us - exit(); - $exitwiki = 1; - - PostponeErrorMessages(0); // Spew postponed messages. - - if(!empty($errormsg)) { - print "


    " . gettext("WikiFatalError") . "

    \n"; - print $errormsg; - print "\n"; - } - - if (isset($dbi)) - CloseDataBase($dbi); - exit; -} - -function PostponeErrorHandler ($errno, $errstr, $errfile, $errline) -{ - global $PostponedErrorMask, $PostponedErrors; - static $inHandler = 0; - - if ($inHandler++ != 0) - return; // prevent recursion. - - if (($errno & NOTICE_ERRORS) != 0) - $what = 'Notice'; - else if (($errno & WARNING_ERRORS) != 0) - $what = 'Warning'; - else - $what = 'Fatal'; + static $in_exit = 0; + global $dbi, $request; - $errfile = ereg_replace('^' . getcwd() . '/', '', $errfile); - $message = sprintf("
    %s:%d: %s[%d]: %s
    \n", - htmlspecialchars($errfile), - $errline, $what, $errno, - htmlspecialchars($errstr)); + if($in_exit) + exit(); // just in case CloseDataBase calls us + $in_exit = true; + if (!empty($dbi)) + $dbi->close(); - if ($what == 'Fatal') - { - PostponeErrorMessages(0); // Spew postponed messages. - echo $message; - ExitWiki(); - exit -1; - } - else if (($errno & error_reporting()) != 0) - { - if (($errno & $PostponedErrorMask) != 0) - { - $PostponedErrors[] = array($errno, $message); - } - - else - echo $message; - } - - $inHandler = 0; + global $ErrorManager; + $ErrorManager->flushPostponedErrors(); + + if(!empty($errormsg)) { + print "


    " . gettext("WikiFatalError") . "

    \n"; + + if (is_string($errormsg)) + print $errormsg; + else + $errormsg->printError(); + + print "\n"; + } + + $request->finish(); + exit; } -set_error_handler('PostponeErrorHandler'); +$ErrorManager->setPostponedErrorMask(E_ALL); +$ErrorManager->setFatalHandler('ExitWiki'); -PostponeErrorMessages(E_ALL); -// For emacs users +// (c-file-style: "gnu") // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/savepage.php b/lib/savepage.php index f2e01426d..1385438b9 100644 --- a/lib/savepage.php +++ b/lib/savepage.php @@ -1,4 +1,7 @@ -"; - $html .= gettext ("PhpWiki is unable to save your changes, because another user edited and saved the page while you were editing the page too. If saving proceeded now changes from the previous author would be lost."); - $html .= "

    \n

    "; - $html .= gettext ("In order to recover from this situation follow these steps:"); - $html .= "\n

    1. "; - $html .= gettext ("Use your browser's Back button to go back to the edit page."); - $html .= "\n
    2. "; - $html .= gettext ("Copy your changes to the clipboard or to another temporary place (e.g. text editor)."); - $html .= "\n
    3. "; - $html .= gettext ("Reload the page. You should now see the most current version of the page. Your changes are no longer there."); - $html .= "\n
    4. "; - $html .= gettext ("Make changes to the file again. Paste your additions from the clipboard (or text editor)."); - $html .= "\n
    5. "; - $html .= gettext ("Press Save again."); - $html .= "
    \n

    "; - $html .= gettext ("Sorry for the inconvenience."); - $html .= "

    "; - - echo GeneratePage('MESSAGE', $html, - sprintf (gettext ("Problem while updating %s"), $pagename), 0); - ExitWiki(); - } - - - $pagehash = RetrievePage($dbi, $pagename, $WikiPageStore, 0); - - // if this page doesn't exist yet, now's the time! - if (! is_array($pagehash)) { - $pagehash = array(); - $pagehash['version'] = 0; - $pagehash['created'] = time(); - $pagehash['flags'] = 0; - $newpage = 1; - } else { - if (($pagehash['flags'] & FLAG_PAGE_LOCKED) && ! $user->is_admin()) { - $html = "

    " . gettext ("This page has been locked by the administrator and cannot be edited."); - $html .= "\n

    " . gettext ("Sorry for the inconvenience."); - echo GeneratePage('MESSAGE', $html, - sprintf (gettext ("Problem while editing %s"), $pagename), 0); - ExitWiki (""); - } - - if(isset($editversion) && ($editversion != $pagehash['version'])) { - ConcurrentUpdates($pagename); - } - - SavePageToArchive($pagename, $pagehash); - $newpage = 0; - } - - // set new pageinfo - $pagehash['lastmodified'] = time(); - $pagehash['version']++; - $pagehash['author'] = $user->id(); - - // create page header - $html = sprintf(gettext("Thank you for editing %s."), - LinkExistingWikiWord($pagename)); - $html .= "
    \n"; - - if (! empty($content)) { - // patch from Grant Morgan for magic_quotes_gpc - fix_magic_quotes_gpc($content); - - $pagehash['content'] = preg_split('/[ \t\r]*\n/', chop($content)); - - // convert spaces to tabs at user request - if (isset($convert)) { - $pagehash['content'] = CookSpaces($pagehash['content']); - } - } - - ReplaceCurrentPage($pagename, $pagehash); - UpdateRecentChanges($dbi, $pagename, $newpage); - - $html .= gettext ("Your careful attention to detail is much appreciated."); - $html .= "\n"; - - // fixme: no test for flat file db system - if (!empty($DBWarning)) { - $html .= "

    Warning: $DBWarning" . - "Please read the INSTALL file and move " . - "the DB file to a permanent location or risk losing " . - "all the pages!\n"; - } - - if (!empty($SignatureImg)) - $html .= sprintf("

    \n", DataURL($SignatureImg)); - - $html .= "
    \n"; - include('lib/transform.php'); - - echo GeneratePage('BROWSE', $html, $pagename, $pagehash); +// FIXME: some links so that it's easy to get back to someplace useful from these +// error pages. + +function ConcurrentUpdates($pagename) { + /* xgettext only knows about c/c++ line-continuation strings + is does not know about php's dot operator. + We want to translate this entire paragraph as one string, of course. + */ + $html = "

    "; + $html .= gettext ("PhpWiki is unable to save your changes, because another user edited and saved the page while you were editing the page too. If saving proceeded now changes from the previous author would be lost."); + $html .= "

    \n

    "; + $html .= gettext ("In order to recover from this situation follow these steps:"); + $html .= "\n

    1. "; + $html .= gettext ("Use your browser's Back button to go back to the edit page."); + $html .= "\n
    2. "; + $html .= gettext ("Copy your changes to the clipboard or to another temporary place (e.g. text editor)."); + $html .= "\n
    3. "; + $html .= gettext ("Reload the page. You should now see the most current version of the page. Your changes are no longer there."); + $html .= "\n
    4. "; + $html .= gettext ("Make changes to the file again. Paste your additions from the clipboard (or text editor)."); + $html .= "\n
    5. "; + $html .= gettext ("Press Save again."); + $html .= "
    \n

    "; + $html .= gettext ("Sorry for the inconvenience."); + $html .= "

    "; + + echo GeneratePage('MESSAGE', $html, + sprintf (gettext ("Problem while updating %s"), $pagename)); + ExitWiki(); +} + +function PageIsLocked($pagename) { + $html = QElement('p', + gettext("This page has been locked by the administrator and cannot be edited.")); + $html .= QElement('p', + gettext ("Sorry for the inconvenience.")); + + echo GeneratePage('MESSAGE', $html, + sprintf (gettext ("Problem while editing %s"), $pagename)); + ExitWiki (""); +} + +function NoChangesMade($pagename) { + $html = QElement('p', gettext ("You have not made any changes.")); + $html .= QElement('p', gettext ("New version not saved.")); + echo GeneratePage('MESSAGE', $html, + sprintf(gettext("Edit aborted: %s"), $pagename)); + ExitWiki (""); +} + +function BadFormVars($pagename) { + $html = QElement('p', gettext ("Bad form submission")); + $html .= QElement('p', gettext ("Required form variables are missing.")); + echo GeneratePage('MESSAGE', $html, + sprintf(gettext("Edit aborted: %s"), $pagename)); + ExitWiki (""); +} + +function savePreview($dbi, $request) { + $pagename = $request->getArg('pagename'); + $version = $request->getArg('version'); + + $page = $dbi->getPage($pagename); + $selected = $page->getRevision($version); + + // FIXME: sanity checking about posted variables + // FIXME: check for simultaneous edits. + foreach (array('minor_edit', 'convert') as $key) + $formvars[$key] = $request->getArg($key) ? 'checked' : ''; + foreach (array('content', 'editversion', 'summary', 'pagename', 'version') as $key) + @$formvars[$key] = htmlspecialchars($request->getArg($key)); + + $template = new WikiTemplate('EDITPAGE'); + $template->setPageRevisionTokens($selected); + $template->replace('FORMVARS', $formvars); + $template->replace('PREVIEW_CONTENT', do_transform($request->getArg('content'))); + echo $template->getExpansion(); +} + +function savePage ($dbi, $request) { + global $user; + + // FIXME: fail if this check fails? 
+ assert($request->get('REQUEST_METHOD') == 'POST'); + + if ($request->getArg('preview')) + return savePreview($dbi, $request); + + $pagename = $request->getArg('pagename'); + $version = $request->getArg('version'); + + $page = $dbi->getPage($pagename); + $current = $page->getCurrentRevision(); + + $content = $request->getArg('content'); + $editversion = $request->getArg('editversion'); + + if ( $content === false || $editversion === false ) + BadFormVars($pagename); // noreturn + + if ($page->get('locked') && !$user->is_admin()) + PageIsLocked($args->pagename); // noreturn. + + $meta['author'] = $user->id(); + $meta['author_id'] = $user->authenticated_id(); + $meta['is_minor_edit'] = (bool) $request->getArg('minor_edit'); + $meta['summary'] = trim($request->getArg('summary')); + + $content = preg_replace('/[ \t\r]+\n/', "\n", chop($content)); + if ($request->getArg('convert')) + $content = CookSpaces($content); + + if ($content == $current->getPackedContent()) { + NoChangesMade($pagename); // noreturn + } + + //////////////////////////////////////////////////////////////// + // + // From here on, we're actually saving. + // + $newrevision = $page->createRevision($editversion + 1, + $content, $meta, + ExtractWikiPageLinks($content)); + if (!is_object($newrevision)) { + // Save failed. + ConcurrentUpdates($pagename); + } + + // Clean out archived versions of this page. + $cleaner = new ArchiveCleaner($GLOBALS['ExpireParams']); + $cleaner->cleanPageRevisions($page); + + $warnings = $dbi->GenericWarnings(); + if (empty($warnings)) { + // Do redirect to browse page. + // In this case, the user will most likely not see the rest of + // the HTML we generate (below). + $request->redirect(WikiURL($pagename, false, 'absolute_url')); + } + + $html = sprintf(gettext("Thank you for editing %s."), + LinkExistingWikiWord($pagename)); + $html .= "
    \n"; + $html .= gettext ("Your careful attention to detail is much appreciated."); + $html .= "\n"; + + if ($warnings) { + $html .= Element('p', "Warning! " + . htmlspecialchars($warnings) + . "
    \n"); + } + + global $SignatureImg; + if (!empty($SignatureImg)) + $html .= sprintf("

    \n", DataURL($SignatureImg)); + + $html .= "
    \n"; + $html .= do_transform($newrevision->getContent()); + echo GeneratePage('BROWSE', $html, $pagename, $newrevision); +} + + +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: ?> diff --git a/lib/search.php b/lib/search.php index 459efb61c..293956c15 100644 --- a/lib/search.php +++ b/lib/search.php @@ -1,37 +1,29 @@ get('searchterm')); +$search_descrip = htmlspecialchars($search_descrip); -$html = "

    " - . sprintf(gettext ("Searching for \"%s\" ....."), - htmlspecialchars($searchterm)) - . "

    \n"; +$html = "

    $search_descrip

    \n
      "; -// quote regexp chars -$search = preg_quote($searchterm); +$iter = $dbi->titleSearch(new TextSearchQuery($args->get('searchterm'))); - -// search matching pages -$query = InitTitleSearch($dbi, $searchterm); $found = 0; - -while ($page = TitleSearchNextMatch($dbi, $query)) { +while ($page = $iter->next()) { $found++; - $html .= LinkExistingWikiWord($page) . "
      \n"; + $html .= "
    • " . LinkExistingWikiWord($page->getName()) . "\n"; } -$html .= "
      \n" - . sprintf(gettext ("%d pages match your query."), $found) - . "\n"; +$html .= ("

    \n" + . sprintf(gettext ("%d pages match your query."), $found) + . "\n"); -echo GeneratePage('MESSAGE', $html, gettext ("Title Search Results"), 0); +echo GeneratePage('MESSAGE', $html, "$search_title: $searchterm"); ?> diff --git a/lib/stdlib.php b/lib/stdlib.php index 0265863f7..875545ef0 100644 --- a/lib/stdlib.php +++ b/lib/stdlib.php @@ -1,10 +1,9 @@ - $val) { + $enc_args[] = urlencode($key) . '=' . urlencode($val); + } + $args = join('&', $enc_args); + } + + if (USE_PATH_INFO) { + $url = $get_abs_url ? SERVER_URL . VIRTUAL_PATH . "/" : ''; + $url .= rawurlencode($pagename); + if ($args) + $url .= "?$args"; + } + else { + $url = $get_abs_url ? SERVER_URL . SCRIPT_NAME : basename(SCRIPT_NAME); + $url .= "?pagename=" . rawurlencode($pagename); + if ($args) + $url .= "&$args"; + } + + return $url; +} function StartTag($tag, $args = '') { @@ -128,41 +107,57 @@ function StartTag($tag, $args = '') return "BAD URL -- remove all of <, >, ""; } + if (empty($linktext)) { + $linktext = $url; + $class = 'rawurl'; + } + else { + $class = 'namedurl'; + } - if (empty($linktext)) - $linktext = QElement('span', array('class' => 'rawurl'), $url); - else - $linktext = htmlspecialchars($linktext); - - return Element('a', - array('href' => $url, 'class' => 'linkurl'), - $linktext); + return QElement('a', + array('href' => $url, 'class' => $class), + $linktext); } +function LinkWikiWord($wikiword, $linktext='') { + global $dbi; + if ($dbi->isWikiPage($wikiword)) + return LinkExistingWikiWord($wikiword, $linktext); + else + return LinkUnknownWikiWord($wikiword, $linktext); +} + + function LinkExistingWikiWord($wikiword, $linktext='') { - if (empty($linktext)) - $linktext = QElement('span', array('class' => 'wikiword'), $wikiword); + if (empty($linktext)) { + $linktext = $wikiword; + $class = 'wiki'; + } else - $linktext = htmlspecialchars($linktext); + $class = 'named-wiki'; - return Element('a', array('href' => WikiURL($wikiword), - 'class' => 'wikilink'), + return 
QElement('a', array('href' => WikiURL($wikiword), + 'class' => $class), $linktext); } function LinkUnknownWikiWord($wikiword, $linktext='') { - if (empty($linktext)) - $linktext = QElement('span', array('class' => 'wikiword'), $wikiword); - else - $linktext = htmlspecialchars($linktext); + if (empty($linktext)) { + $linktext = $wikiword; + $class = 'wikiunknown'; + } + else { + $class = 'named-wikiunknown'; + } - return Element('span', array('class' => 'wikiunknown'), - QElement('a', array('href' => WikiURL($wikiword, array('action' => 'edit')),'class' => 'wikiunknown'),'?') -. Element('u', $linktext) -); + return Element('span', array('class' => $class), + QElement('a', + array('href' => WikiURL($wikiword, array('action' => 'edit'))), + '?') + . Element('u', $linktext)); } - function LinkImage($url, $alt='[External Image]') { // FIXME: Is this needed (or sufficient?) // As long as the src in htmlspecialchars()ed I think it's safe. @@ -172,7 +167,6 @@ function StartTag($tag, $args = '') return Element('img', array('src' => $url, 'alt' => $alt)); } - // converts spaces to tabs function CookSpaces($pagearray) { return preg_replace("/ {3,8}/", "\t", $pagearray); @@ -215,7 +209,7 @@ function StartTag($tag, $args = '') function MakeWikiForm ($pagename, $args, $class, $button_text = '') { $formargs['action'] = USE_PATH_INFO ? 
WikiURL($pagename) : SCRIPT_NAME; - $formargs['method'] = 'post'; + $formargs['method'] = 'get'; $formargs['class'] = $class; $contents = ''; @@ -239,6 +233,7 @@ function StartTag($tag, $args = '') array('name' => 'MAX_FILE_SIZE', 'value' => MAX_UPLOAD_SIZE, 'type' => 'hidden')); + $formargs['method'] = 'post'; } } @@ -249,6 +244,7 @@ function StartTag($tag, $args = '') if (!empty($button_text)) { $row .= Element('td', Element('input', array('type' => 'submit', + 'class' => 'button', 'value' => $button_text))); } @@ -267,63 +263,62 @@ function StartTag($tag, $args = '') return $args; } - function LinkPhpwikiURL($url, $text = '') { - global $pagename; - $args = array(); - $page = $pagename; +function LinkPhpwikiURL($url, $text = '') { + $args = array(); - if (!preg_match('/^ phpwiki: ([^?]*) [?]? (.*) $/x', $url, $m)) - return "BAD phpwiki: URL"; + if (!preg_match('/^ phpwiki: ([^?]*) [?]? (.*) $/x', $url, $m)) + return "BAD phpwiki: URL"; - if ($m[1]) - $page = urldecode($m[1]); - $qargs = $m[2]; + if ($m[1]) + $pagename = urldecode($m[1]); + $qargs = $m[2]; - if (!$page && preg_match('/^(diff|edit|links|info|diff)=([^&]+)$/', $qargs, $m)) - { - // Convert old style links (to not break diff links in RecentChanges). - $page = urldecode($m[2]); - $args = array("action" => $m[1]); - } - else - { - $args = SplitQueryArgs($qargs); - } - - if (isset($args['action']) && $args['action'] == 'browse') - unset($args['action']); - - if (empty($args['action'])) - $class = 'wikilink'; - else if (IsSafeAction($args['action'])) - $class = 'wikiaction'; - else - { - // Don't allow administrative links on unlocked pages. - // FIXME: Ugh: don't like this... 
- global $pagehash; - if (($pagehash['flags'] & FLAG_PAGE_LOCKED) == 0) - return QElement('u', array('class' => 'wikiunsafe'), - gettext('Lock page to enable link')); - - $class = 'wikiadmin'; - } + if (empty($pagename) && preg_match('/^(diff|edit|links|info)=([^&]+)$/', $qargs, $m)) { + // Convert old style links (to not break diff links in RecentChanges). + $pagename = urldecode($m[2]); + $args = array("action" => $m[1]); + } + else { + $args = SplitQueryArgs($qargs); + } + + if (empty($pagename)) + $pagename = $GLOBALS['pagename']; + + if (isset($args['action']) && $args['action'] == 'browse') + unset($args['action']); + /*FIXME: + if (empty($args['action'])) + $class = 'wikilink'; + else if (is_safe_action($args['action'])) + $class = 'wikiaction'; + */ + if (empty($args['action']) || is_safe_action($args['action'])) + $class = 'wikiaction'; + else { + // Don't allow administrative links on unlocked pages. + // FIXME: Ugh: don't like this... + global $dbi; + $page = $dbi->getPage($GLOBALS['pagename']); + if (!$page->get('locked')) + return QElement('u', array('class' => 'wikiunsafe'), + gettext('Lock page to enable link')); + + $class = 'wikiadmin'; + } - // FIXME: ug, don't like this - if (preg_match('/=\d*\(/', $qargs)) - return MakeWikiForm($page, $args, $class, $text); - else - { - if ($text) - $text = htmlspecialchars($text); - else - $text = QElement('span', array('class' => 'rawurl'), $url); - - return Element('a', array('href' => WikiURL($page, $args), - 'class' => $class), - $text); - } - } + // FIXME: ug, don't like this + if (preg_match('/=\d*\(/', $qargs)) + return MakeWikiForm($pagename, $args, $class, $text); + if ($text) + $text = htmlspecialchars($text); + else + $text = QElement('span', array('class' => 'rawurl'), $url); + + return Element('a', array('href' => WikiURL($pagename, $args), + 'class' => $class), + $text); +} function ParseAndLink($bracketlink) { global $dbi, $AllowedProtocols, $InlineImages; @@ -349,7 +344,7 @@ function 
StartTag($tag, $args = '') $linktype = 'simple'; } - if (IsWikiPage($dbi, $URL)) { + if ($dbi->isWikiPage($URL)) { $link['type'] = "wiki-$linktype"; $link['link'] = LinkExistingWikiWord($URL, $linkname); } elseif (preg_match("#^($AllowedProtocols):#", $URL)) { @@ -380,38 +375,41 @@ function StartTag($tag, $args = '') } - function ExtractWikiPageLinks($content) - { - global $WikiNameRegexp; - - $wikilinks = array(); - $numlines = count($content); - for($l = 0; $l < $numlines; $l++) - { - // remove escaped '[' - $line = str_replace('[[', ' ', $content[$l]); - - // bracket links (only type wiki-* is of interest) - $numBracketLinks = preg_match_all("/\[\s*([^\]|]+\|)?\s*(.+?)\s*\]/", $line, $brktlinks); - for ($i = 0; $i < $numBracketLinks; $i++) { - $link = ParseAndLink($brktlinks[0][$i]); - if (preg_match("#^wiki#", $link['type'])) - $wikilinks[$brktlinks[2][$i]] = 1; - - $brktlink = preg_quote($brktlinks[0][$i]); - $line = preg_replace("|$brktlink|", '', $line); - } - - // BumpyText old-style wiki links - if (preg_match_all("/!?$WikiNameRegexp/", $line, $link)) { - for ($i = 0; isset($link[0][$i]); $i++) { - if($link[0][$i][0] <> '!') - $wikilinks[$link[0][$i]] = 1; - } - } +function ExtractWikiPageLinks($content) +{ + global $WikiNameRegexp; + + if (is_string($content)) + $content = explode("\n", $content); + + $wikilinks = array(); + foreach ($content as $line) { + // remove plugin code + $line = preg_replace('/<\?plugin\s+\w.*?\?>/', '', $line); + // remove escaped '[' + $line = str_replace('[[', ' ', $line); + + // bracket links (only type wiki-* is of interest) + $numBracketLinks = preg_match_all("/\[\s*([^\]|]+\|)?\s*(.+?)\s*\]/", $line, $brktlinks); + for ($i = 0; $i < $numBracketLinks; $i++) { + $link = ParseAndLink($brktlinks[0][$i]); + if (preg_match("#^wiki#", $link['type'])) + $wikilinks[$brktlinks[2][$i]] = 1; + + $brktlink = preg_quote($brktlinks[0][$i]); + $line = preg_replace("|$brktlink|", '', $line); + } + + // BumpyText old-style wiki links + if 
(preg_match_all("/!?$WikiNameRegexp/", $line, $link)) { + for ($i = 0; isset($link[0][$i]); $i++) { + if($link[0][$i][0] <> '!') + $wikilinks[$link[0][$i]] = 1; + } } - return $wikilinks; - } + } + return array_keys($wikilinks); +} function LinkRelatedPages($dbi, $pagename) { @@ -419,6 +417,7 @@ function StartTag($tag, $args = '') if(!function_exists('GetWikiPageLinks')) return ''; + //FIXME: fix or toss? $links = GetWikiPageLinks($dbi, $pagename); $txt = ""; @@ -437,7 +436,7 @@ function StartTag($tag, $args = '') for($i = 0; $i < NUM_RELATED_PAGES; $i++) { if(isset($links['out'][$i])) { list($name, $score) = $links['out'][$i]; - if(IsWikiPage($dbi, $name)) + if($dbi->isWikiPage($name)) $txt .= LinkExistingWikiWord($name) . " ($score), "; } } @@ -455,220 +454,60 @@ function StartTag($tag, $args = '') return $txt; } - - # GeneratePage() -- takes $content and puts it in the template $template - # this function contains all the template logic - # - # $template ... name of the template (see config.php for list of names) - # $content ... html content to put into the page - # $name ... page title - # $hash ... if called while creating a wiki page, $hash points to - # the $pagehash array of that wiki page. 
- - function GeneratePage($template, $content, $name, $hash) - { - global $templates; - global $datetimeformat, $dbi, $logo, $FieldSeparator; - global $user, $pagename; - global $WikiPageStore; - - if (!is_array($hash)) - unset($hash); - function _dotoken ($id, $val, &$page) { - global $FieldSeparator; - $page = str_replace("$FieldSeparator#$id$FieldSeparator#", - $val, $page); - } - - function _iftoken ($id, $condition, &$page) { - global $FieldSeparator; - - // line based IF directive - $lineyes = "$FieldSeparator#IF $id$FieldSeparator#"; - $lineno = "$FieldSeparator#IF !$id$FieldSeparator#"; - // block based IF directive - $blockyes = "$FieldSeparator#IF:$id$FieldSeparator#"; - $blockyesend = "$FieldSeparator#ENDIF:$id$FieldSeparator#"; - $blockno = "$FieldSeparator#IF:!$id$FieldSeparator#"; - $blocknoend = "$FieldSeparator#ENDIF:!$id$FieldSeparator#"; - - if ($condition) { - $page = str_replace($lineyes, '', $page); - $page = str_replace($blockyes, '', $page); - $page = str_replace($blockyesend, '', $page); - $page = preg_replace("/$blockno(.*?)$blocknoend/s", '', $page); - $page = ereg_replace("${lineno}[^\n]*\n", '', $page); - } else { - $page = str_replace($lineno, '', $page); - $page = str_replace($blockno, '', $page); - $page = str_replace($blocknoend, '', $page); - $page = preg_replace("/$blockyes(.*?)$blockyesend/s", '', $page); - $page = ereg_replace("${lineyes}[^\n]*\n", '', $page); - } - } - - $page = join('', file(FindLocalizedFile($templates[$template]))); - $page = str_replace('###', "$FieldSeparator#", $page); - - // valid for all pagetypes - _iftoken('COPY', isset($hash['copy']), $page); - _iftoken('LOCK', (isset($hash['flags']) && - ($hash['flags'] & FLAG_PAGE_LOCKED)), $page); - _iftoken('ADMIN', $user->is_admin(), $page); - _iftoken('ANONYMOUS', !$user->is_authenticated(), $page); - _iftoken('CURRENT', isset($hash['version']) && $hash['version'] == GetMaxVersionNumber($dbi, $hash['pagename'], $WikiPageStore), $page); - - if 
(empty($hash['minor_edit_checkbox'])) - $hash['minor_edit_checkbox'] = ''; - _iftoken('MINOR_EDIT_CHECKBOX', $hash['minor_edit_checkbox'], $page); - - _dotoken('MINOR_EDIT_CHECKBOX', $hash['minor_edit_checkbox'], $page); - - _dotoken('USERID', htmlspecialchars($user->id()), $page); - _dotoken('PAGE', htmlspecialchars($name), $page); - _dotoken('SPLIT_PAGE', - htmlspecialchars( - preg_replace('/([[:lower:]])([[:upper:]])/', '\\1 \\2', $name)), - $page); - _dotoken('LOGO', htmlspecialchars(DataURL($logo)), $page); - _dotoken('CSS_URL', htmlspecialchars(DataURL(CSS_URL)), $page); - - _dotoken('RCS_IDS', $GLOBALS['RCS_IDS'], $page); - - $prefs = $user->getPreferences(); - _dotoken('EDIT_AREA_WIDTH', $prefs['edit_area.width'], $page); - _dotoken('EDIT_AREA_HEIGHT', $prefs['edit_area.height'], $page); - - // FIXME: Clean up this stuff - _dotoken('BROWSE', WikiURL(''), $page); - - if (USE_PATH_INFO) - _dotoken('BASE_URL', - SERVER_URL . VIRTUAL_PATH . "/" . WikiURL($pagename), $page); - else - _dotoken('BASE_URL', SERVER_URL . SCRIPT_NAME, $page); - - if ($GLOBALS['action'] != 'browse') - _dotoken('ROBOTS_META', - Element('meta', array('name' => 'robots', - 'content' => 'noindex, nofollow')), - $page); - else - _dotoken('ROBOTS_META', '', $page); - - - // invalid for messages (search results, error messages) - if ($template != 'MESSAGE') { - $browse_page = WikiURL($name); - _dotoken('BROWSE_PAGE', $browse_page, $page); - - $arg_sep = strstr($browse_page, '?') ? '&' : '?'; - _dotoken('ACTION', $browse_page . $arg_sep . 
"action=", $page); - - _dotoken('PAGEURL', rawurlencode($name), $page); - if (!empty($hash['lastmodified'])) - _dotoken('LASTMODIFIED', - strftime($datetimeformat, $hash['lastmodified']), $page); - if (!empty($hash['author'])) - _dotoken('LASTAUTHOR', $hash['author'], $page); - if (!empty($hash['version'])) - _dotoken('VERSION', $hash['version'], $page); - if (!empty($hash['pagename'])) - _dotoken('CURRENT_VERSION', GetMaxVersionNumber($dbi, $hash['pagename'], $WikiPageStore), $page); - if (strstr($page, "$FieldSeparator#HITS$FieldSeparator#")) { - _dotoken('HITS', GetHitCount($dbi, $name), $page); - } - if (strstr($page, "$FieldSeparator#RELATEDPAGES$FieldSeparator#")) { - _dotoken('RELATEDPAGES', LinkRelatedPages($dbi, $name), $page); - } - } - - _dotoken('CONTENT', $content, $page); - return $page; - } - -function UpdateRecentChanges($dbi, $pagename, $isnewpage) -{ - global $user; - global $dateformat; - global $WikiPageStore; - - $recentchanges = RetrievePage($dbi, gettext ("RecentChanges"), $WikiPageStore, 0); - - // this shouldn't be necessary, since PhpWiki loads - // default pages if this is a new baby Wiki - $now = time(); - $today = strftime($dateformat, $now); - - if (is_array($recentchanges)) { - $isNewDay = strftime($dateformat, $recentchanges['lastmodified']) != $today; - } - else { - $recentchanges = array('version' => 1, - 'created' => $now, - 'flags' => FLAG_PAGE_LOCKED, - 'author' => $GLOBALS['user']->id()); - $recentchanges['content'] - = array(gettext("The most recently changed pages are listed below."), - '', - "____$today " . 
gettext("(first day for this Wiki)"), - '', - gettext("Quick title search:"), - '[phpwiki:?action=search&searchterm=()]', - '----'); - $isNewDay = 0; - } - $recentchanges['lastmodified'] = $now; - - $numlines = sizeof($recentchanges['content']); - $newpage = array(); - $k = 0; - - // scroll through the page to the first date and break - // dates are marked with "____" at the beginning of the line - for ($i = 0; $i < $numlines; $i++) { - if (preg_match("/^____/", - $recentchanges['content'][$i])) { - break; - } else { - $newpage[$k++] = $recentchanges['content'][$i]; - } - } - - // if it's a new date, insert it - $newpage[$k++] = $isNewDay ? "____$today\r" - : $recentchanges['content'][$i++]; - - $userid = $user->id(); - - // add the updated page's name to the array - if($isnewpage) { - $newpage[$k++] = "* [$pagename] (new) ..... $userid\r"; - } else { - $diffurl = "phpwiki:" . rawurlencode($pagename) . "?action=diff"; - $newpage[$k++] = "* [$pagename] ([diff|$diffurl]) ..... $userid\r"; - } - if ($isNewDay) - $newpage[$k++] = "\r"; - - // copy the rest of the page into the new array - // and skip previous entry for $pagename - $pagename = preg_quote($pagename); - for (; $i < $numlines; $i++) { - if (!preg_match("|\[$pagename\]|", $recentchanges['content'][$i])) { - $newpage[$k++] = $recentchanges['content'][$i]; - } - } - - $recentchanges['content'] = $newpage; +/** + * Split WikiWords in page names. + * + * It has been deemed useful to split WikiWords (into "Wiki Words") + * in places like page titles. This is rumored to help search engines + * quite a bit. + * + * @param $page string The page name. + * + * @return string The split name. + */ +function split_pagename ($page) { + + if (preg_match("/\s/", $page)) + return $page; // Already split --- don't split any more. + + // FIXME: this algorithm is Anglo-centric. 
+ static $RE; + if (!isset($RE)) { + // This mess splits between a lower-case letter followed by either an upper-case + // or a numeral; except that it wont split the prefixes 'Mc', 'De', or 'Di' off + // of their tails. + $RE[] = '/([[:lower:]])((? $val) + $RE[$key] = pcre_fix_posix_classes($val); + } + + foreach ($RE as $regexp) + $page = preg_replace($regexp, '\\1 \\2', $page); + return $page; +} - ReplaceCurrentPage(gettext ("RecentChanges"), $recentchanges); +function NoSuchRevision ($page, $version) { + $html = "

    " . gettext("Bad Version") . "\n

    "; + $html .= sprintf(gettext("I'm sorry. Version %d of %s is not in my database."), + $version, htmlspecialchars($page->getName())); + $html .= "\n"; + echo GeneratePage('MESSAGE', $html, gettext("Bad Version")); + ExitWiki (""); } -// For emacs users + +// (c-file-style: "gnu") // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/transform.php b/lib/transform.php index 441605cd9..8369e9fcc 100644 --- a/lib/transform.php +++ b/lib/transform.php @@ -1,4 +1,5 @@ -register(WT_SIMPLE_MARKUP, 'wtm_plugin_link'); +$transform->register(WT_MODE_MARKUP, 'wtm_plugin'); + $transform->register(WT_TOKENIZER, 'wtt_doublebrackets', '\[\['); $transform->register(WT_TOKENIZER, 'wtt_footnotes', '^\[\d+\]'); $transform->register(WT_TOKENIZER, 'wtt_footnoterefs', '\[\d+\]'); @@ -294,9 +305,6 @@ if (function_exists('wtm_table')) { $transform->register(WT_SIMPLE_MARKUP, 'wtm_htmlchars'); $transform->register(WT_SIMPLE_MARKUP, 'wtm_linebreak'); $transform->register(WT_SIMPLE_MARKUP, 'wtm_bold_italics'); - $transform->register(WT_SIMPLE_MARKUP, 'wtm_title_search'); - $transform->register(WT_SIMPLE_MARKUP, 'wtm_fulltext_search'); - $transform->register(WT_SIMPLE_MARKUP, 'wtm_mostpopular'); $transform->register(WT_MODE_MARKUP, 'wtm_list_ul'); $transform->register(WT_MODE_MARKUP, 'wtm_list_ol'); @@ -306,7 +314,9 @@ if (function_exists('wtm_table')) { $transform->register(WT_MODE_MARKUP, 'wtm_hr'); $transform->register(WT_MODE_MARKUP, 'wtm_paragraph'); - $html = $transform->do_transform($html, $pagehash['content']); +//$html = $transform->do_transform($html, $pagehash['content']); + return $transform->do_transform('', $lines); +} /* Requirements for functions registered to WikiTransform: @@ -391,6 +401,10 @@ function wtt_footnoterefs($match, &$trfrm) function wtt_bracketlinks($match, &$trfrm) { $link = ParseAndLink($match); + if (strstr($link['link'], 
"")) { + // FIXME: BIG HACK: see note in wtm_plugin. + return "

    " . $link['link'] . "

    "; + } return $link["link"]; } @@ -413,10 +427,7 @@ function wtt_bumpylinks($match, &$trfrm) global $dbi; if ($match[0] == "!") return htmlspecialchars(substr($match,1)); - // FIXME: make a LinkWikiWord() function? - if (IsWikiPage($dbi, $match)) - return LinkExistingWikiWord($match); - return LinkUnknownWikiWord($match); + return LinkWikiWord($match); } // end of tokenizer functions @@ -453,44 +464,34 @@ function wtt_bumpylinks($match, &$trfrm) ////////////////////////////////////////////////////////// // some tokens to be replaced by (dynamic) content - // wiki token: title search dialog - function wtm_title_search($line, &$transformer) { - if (strpos($line, '%%Search%%') !== false) { - $html = LinkPhpwikiURL( - "phpwiki:?action=search&searchterm=()&searchtype=title", - gettext("Search")); - - $line = str_replace('%%Search%%', $html, $line); - } - return $line; - } - - // wiki token: fulltext search dialog - function wtm_fulltext_search($line, &$transformer) { - if (strpos($line, '%%Fullsearch%%') !== false) { - $html = LinkPhpwikiURL( - "phpwiki:?action=search&searchterm=()&searchtype=full", - gettext("Search")); - - $line = str_replace('%%Fullsearch%%', $html, $line); - } - return $line; - } +// FIXME: some plugins are in-line (maybe?) and some are block level. +// Here we treat them all as inline, which will probably +// generate some minorly invalid HTML in some cases. +// +function wtm_plugin_link($line, &$transformer) { + // FIXME: is this good syntax? + global $dbi, $request; // FIXME: make these non-global? + + if (preg_match('/^(.*?)(<\?plugin-link\s+.*?\?>)(.*)$/', $line, $m)) { + list(, $prematch, $plugin_pi, $postmatch) = $m; + $loader = new WikiPluginLoader; + $html = $loader->expandPI($plugin_pi, $dbi, $request); + $line = $prematch . $transformer->token($html) . 
$postmatch; + } + return $line; +} - // wiki token: mostpopular list - function wtm_mostpopular($line, &$transformer) { - global $ScriptUrl, $dbi; - if (strpos($line, '%%Mostpopular%%') !== false) { - $query = InitMostPopular($dbi, MOST_POPULAR_LIST_LENGTH); - $html = "

    \n"; - while ($qhash = MostPopularNextMatch($dbi, $query)) { - $html .= "
    $qhash[hits] ... " . LinkExistingWikiWord($qhash['pagename']) . "\n"; - } - $html .= "
    \n"; - $line = str_replace('%%Mostpopular%%', $html, $line); - } - return $line; - } +function wtm_plugin($line, &$transformer) { + // FIXME: is this good syntax? + global $dbi, $request; // FIXME: make these non-global? + + if (preg_match('/^<\?plugin(-form)?\s.*\?>\s*$/', $line)) { + $loader = new WikiPluginLoader; + $html = $loader->expandPI($line, $dbi, $request); + $line = $transformer->SetHTMLMode('', 0) . $transformer->token($html); + } + return $line; +} ////////////////////////////////////////////////////////// @@ -613,9 +614,12 @@ function wtm_table($line, &$trfrm) return $line; } -// For emacs users +// (c-file-style: "gnu") // Local Variables: // mode: php -// c-file-style: "ellemtel" +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil // End: ?> diff --git a/lib/userauth.php b/lib/userauth.php deleted file mode 100644 index 3830a2c27..000000000 --- a/lib/userauth.php +++ /dev/null @@ -1,198 +0,0 @@ -userid = ''; - $this->state = 'login'; - $this->realm = 'PhpWiki0000'; - } - else - $this = unserialize(fix_magic_quotes_gpc($WIKI_AUTH)); - - if ($this->state == 'authorized' && $auth_mode == 'LOGIN') - { - // ...logout - $this->realm++; - $this->state = 'loggedout'; - } - - if ($auth_mode != 'LOGOUT') - { - $user = $this->_get_authenticated_userid(); - - if (!$user && $auth_mode != 'ANON_OK') - $warning = $this->_demand_http_authentication(); //NORETURN - } - - if (empty($user)) - { - // Authentication failed - if ($this->state == 'authorized') - $this->realm++; - $this->state = 'loggedout'; - $this->userid = empty($REMOTE_HOST) ? $REMOTE_ADDR : $REMOTE_HOST; - } - else - { - // Successful authentication - $this->state = 'authorized'; - $this->userid = $user; - } - - // Save state to cookie. 
- setcookie('WIKI_AUTH', serialize($this), 0, '/'); - if (isset($warning)) - echo $warning; - } - - function id () { - return $this->userid; - } - - function is_authenticated () { - return $this->state == 'authorized'; - } - - function is_admin () { - return $this->is_authenticated() && $this->userid == ADMIN_USER; - } - - function must_be_admin ($action = "") { - if (! $this->is_admin()) - { - if ($action) - $to_what = sprintf(gettext("to perform action '%s'"), $action); - else - $to_what = gettext("to do that"); - ExitWiki(gettext("You must be logged in as an administrator") - . " $to_what"); - } - } - - // This is a bit of a hack: - function setPreferences ($prefs) { - global $WIKI_PREFS; - $WIKI_PREFS = serialize($prefs); - $expires = time() + 365 * 24 * 3600; // expire in a year - setcookie('WIKI_PREFS', $WIKI_PREFS, $expires, '/'); - } - - function getPreferences () { - $prefs = array('edit_area.width' => 80, - 'edit_area.height' => 22); - - global $HTTP_COOKIE_VARS; - if (isset($HTTP_COOKIE_VARS['WIKI_PREFS'])) { - $prefcookie = $HTTP_COOKIE_VARS['WIKI_PREFS']; - if (is_string($prefcookie)) { - $prefcookie = unserialize(fix_magic_quotes_gpc($prefcookie)); - if (is_array($prefcookie)) { - while (list($k, $v) = each($prefs)) - if (!empty($prefcookie[$k])) - $prefs[$k] = $prefcookie[$k]; - } - } - } - - // Some sanity checks. (FIXME: should move somewhere else) - if (!($prefs['edit_area.width'] >= 30 && $prefs['edit_area.width'] <= 150)) - $prefs['edit_area.width'] = 80; - if (!($prefs['edit_area.height'] >= 5 && $prefs['edit_area.height'] <= 80)) - $prefs['edit_area.height'] = 22; - return $prefs; - } - - function _get_authenticated_userid () { - if ( ! ($user = $this->_get_http_authenticated_userid()) ) - return false; - - switch ($this->state) { - case 'login': - // Either we just asked for a password, or cookies are not enabled. - // In either case, proceed with successful login. - return $user; - case 'loggedout': - // We're logged out. 
Ignore http authed user. - return false; - default: - // Else, as long as the user hasn't changed, fine. - if ($user && $user != $this->userid) - return false; - return $user; - } - } - - function _get_http_authenticated_userid () { - global $PHP_AUTH_USER, $PHP_AUTH_PW; - global $WikiNameRegexp; - - if (empty($PHP_AUTH_USER)) - return false; - - if ($PHP_AUTH_USER == ADMIN_USER) { - if (empty($PHP_AUTH_PW) || $PHP_AUTH_PW != ADMIN_PASSWD) - return false; - } - else if (! ALLOW_BOGO_LOGIN) { - return false; - } - else if (! preg_match('/\A' . $WikiNameRegexp . '\z/', $PHP_AUTH_USER)) { - return false; - } - - return $PHP_AUTH_USER; - } - - function _demand_http_authentication () { - if (!defined('ADMIN_USER') || !defined('ADMIN_PASSWD') - || ADMIN_USER == '' || ADMIN_PASSWD =='') { - return - "

    " - . gettext("You must set the administrator account and password before you can log in.") - . "

    \n"; - } - - // Request password - $this->userid = ''; - $this->state = 'login'; - setcookie('WIKI_AUTH', serialize($this), 0, '/'); - header('WWW-Authenticate: Basic realm="' . $this->realm . '"'); - header("HTTP/1.0 401 Unauthorized"); - if (ACCESS_LOG) - $LogEntry->status = 401; - echo "

    " . gettext ("You entered an invalid login or password.") . "\n"; - if (ALLOW_BOGO_LOGIN) { - echo "

    "; - echo gettext ("You can log in using any valid WikiWord as a user ID.") . "\n"; - echo gettext ("(Any password will work, except, of course for the admin user.)") . "\n"; - } - - ExitWiki(); - } -} - -// For emacs users -// Local Variables: -// mode: php -// c-file-style: "ellemtel" -// End: -?> diff --git a/lib/ziplib.php b/lib/ziplib.php index fc86ad669..dab88dc2e 100644 --- a/lib/ziplib.php +++ b/lib/ziplib.php @@ -1,11 +1,5 @@ Warning: " . htmlspecialchars($msg) . "
    \n"; -} +rcs_id('$Id: ziplib.php,v 1.8 2001-09-18 19:16:23 dairiki Exp $'); /** * GZIP stuff. @@ -512,46 +506,32 @@ function MimeMultipart ($parts) return $head . $sep . implode($sep, $parts) . "\r\n--${boundary}--\r\n"; } -function MimeifyPage ($pagehash) { - extract($pagehash); - // FIXME: add 'hits' to $params - $params = array('pagename' => rawurlencode($pagename), - 'author' => rawurlencode($author), - 'version' => $version, - 'flags' =>"", - 'lastmodified' => $lastmodified, - 'created' => $created); - - if (($flags & FLAG_PAGE_LOCKED) != 0) - $params['flags'] = 'PAGE_LOCKED'; - - if (isset($refs) && is_array($refs)) - { - // phpwiki's with versions > 1.2.x shouldn't have references. - for ($i = 1; $i <= 12 /*NUM_LINKS*/; $i++) - if (!empty($refs[$i])) - $params["ref$i"] = rawurlencode($refs[$i]); - } - - $out = MimeContentTypeHeader('application', 'x-phpwiki', $params); - $out .= "Content-Transfer-Encoding: quoted-printable\r\n"; - $out .= "\r\n"; - - reset($content); - while (list($junk, $line) = each($content)) - $out .= QuotedPrintableEncode(chop($line)) . "\r\n"; - return $out; -} +function MimeifyPageRevision ($revision) { + $page = $revision->getPage(); + // FIXME: add 'hits' to $params + $params = array('pagename' => rawurlencode($page->getName()), + 'author' => rawurlencode($revision->get('author')), + 'version' => $revision->getVersion(), + 'flags' =>"", + 'lastmodified' => $revision->get('mtime')); + if ($page->get('mtime')) + $params['created'] = $page->get('mtime'); + if ($page->get('locked')) + $params['flags'] = 'PAGE_LOCKED'; + if ($revision->get('author_id')) + $params['author_id'] = $revision->get('author_id'); + -function MimeifyPages ($pagehashes) -{ - $npages = sizeof($pagehashes); - for ($i = 0; $i < $npages; $i++) - $parts[$i] = MimeifyPage($pagehashes[$i]); - return $npages == 1 ? 
$parts[0] : MimeMultipart($parts); + $out = MimeContentTypeHeader('application', 'x-phpwiki', $params); + $out .= "Content-Transfer-Encoding: quoted-printable\r\n"; + $out .= "\r\n"; + + foreach ($revision->getContent() as $line) { + $out .= QuotedPrintableEncode(chop($line)) . "\r\n"; + } + return $out; } - /** * Routines for parsing Mime-ified phpwiki pages. */ @@ -675,56 +655,83 @@ function GenerateFootnotesFromRefs($params) // references. function ParseMimeifiedPages ($data) { - if (!($headers = ParseRFC822Headers($data)) - || empty($headers['content-type'])) - { - //warn("Can't find content-type header"); - return false; + if (!($headers = ParseRFC822Headers($data)) + || empty($headers['content-type'])) { + //trigger_error("Can't find content-type header", E_USER_WARNING); + return false; } - $typeheader = $headers['content-type']; + $typeheader = $headers['content-type']; - if (!(list ($type, $subtype, $params) = ParseMimeContentType($typeheader))) - { - warn("Can't parse content-type: (" - . htmlspecialchars($typeheader) . ")"); - return false; + if (!(list ($type, $subtype, $params) = ParseMimeContentType($typeheader))) { + trigger_error("Can't parse content-type: (" + . htmlspecialchars($typeheader) . ")", E_USER_WARNING); + return false; } - if ("$type/$subtype" == 'multipart/mixed') - return ParseMimeMultipart($data, $params['boundary']); - else if ("$type/$subtype" != 'application/x-phpwiki') - { - warn("Bad content-type: $type/$subtype"); - return false; + if ("$type/$subtype" == 'multipart/mixed') { + return ParseMimeMultipart($data, $params['boundary']); + } + else if ("$type/$subtype" != 'application/x-phpwiki') { + trigger_error("Bad content-type: $type/$subtype", E_USER_WARNING); + return false; } - // FIXME: more sanity checking? 
- $pagehash = array('pagename' => '', - 'author' => '', - 'version' => 0, - 'lastmodified' => '', - 'created' => ''); - while(list($key, $val) = each ($pagehash)) - if (!empty($params[$key])) - $pagehash[$key] = rawurldecode($params[$key]); - - $pagehash['flags'] = 0; - if (!empty($params['flags'])) - { - if (preg_match('/PAGE_LOCKED/', $params['flags'])) - $pagehash['flags'] |= FLAG_PAGE_LOCKED; + // FIXME: more sanity checking? + $page = array(); + $pagedata = array(); + $versiondata = array(); + + foreach ($params as $key => $value) { + if (empty($value)) + continue; + $value = rawurldecode($value); + switch ($key) { + case 'pagename': + case 'version': + $page[$key] = $value; + break; + case 'flags': + if (preg_match('/PAGE_LOCKED/', $value)) + $pagedata['locked'] = 'yes'; + break; + case 'created': + $pagedata[$key] = $value; + break; + case 'lastmodified': + $versiondata['mtime'] = $value; + break; + case 'author': + case 'author_id': + $versiondata[$key] = $value; + break; + } + } + + // FIXME: do we need to try harder to find a pagename if we + // haven't got one yet? 
+ if (!isset($versiondata['author'])) { + $versiondata['author'] = $GLOBALS['user']->id(); } + + $encoding = strtolower($headers['content-transfer-encoding']); + if ($encoding == 'quoted-printable') + $data = QuotedPrintableDecode($data); + else if ($encoding && $encoding != 'binary') + ExitWiki("Unknown encoding type: $encoding"); - $encoding = strtolower($headers['content-transfer-encoding']); - if ($encoding == 'quoted-printable') - $data = QuotedPrintableDecode($data); - else if ($encoding && $encoding != 'binary') - ExitWiki("Unknown encoding type: $encoding"); + $data .= GenerateFootnotesFromRefs($params); - $data .= GenerateFootnotesFromRefs($params); - - $pagehash['content'] = preg_split('/[ \t\r]*\n/', chop($data)); + $page['content'] = preg_replace('/[ \t\r]*\n/', "\n", chop($data)); + $page['pagedata'] = $pagedata; + $page['versiondata'] = $versiondata; - return array($pagehash); + return array($page); } +// Local Variables: +// mode: php +// tab-width: 8 +// c-basic-offset: 4 +// c-hanging-comment-ender-p: nil +// indent-tabs-mode: nil +// End: ?> diff --git a/pgsrc/BackLinks b/pgsrc/BackLinks new file mode 100644 index 000000000..8d348777e --- /dev/null +++ b/pgsrc/BackLinks @@ -0,0 +1,11 @@ +From hostmaster@dairiki.org Sat Feb 10 21:28:19 2001 +Subject: BackLinks +From: hostmaster@dairiki.org (PhpWiki) +Date: Fri, 31 Aug 2001 22:07:14 -0700 +Mime-Version: 1.0 +Content-Type: application/x-phpwiki; + pagename=BackLinks; + flags=PAGE_LOCKED +Content-Transfer-Encoding: binary + + diff --git a/pgsrc/DebugInfo b/pgsrc/DebugInfo new file mode 100644 index 000000000..d82d8693b --- /dev/null +++ b/pgsrc/DebugInfo @@ -0,0 +1 @@ + diff --git a/pgsrc/FindPage b/pgsrc/FindPage index 4c17dda25..af342c3af 100644 --- a/pgsrc/FindPage +++ b/pgsrc/FindPage @@ -7,11 +7,11 @@ Here are some good starting points for browsing. Here's a title search. Try something like ''wiki'' or ''sandwich''. -%%Search%% + Use the following for a full text search. This takes a few seconds. 
The results will show all lines on a given page that contain a match. -%%Fullsearch%% + ------ Separate words with a space. All words have to match. diff --git a/pgsrc/FullTextSearch b/pgsrc/FullTextSearch new file mode 100644 index 000000000..d0476caeb --- /dev/null +++ b/pgsrc/FullTextSearch @@ -0,0 +1,15 @@ +From hostmaster@dairiki.org Sat Feb 10 21:28:19 2001 +Subject: FullTextSearch +From: hostmaster@dairiki.org (PhpWiki) +Date: Wed, 12 Sep 2001 13:49:06 -0700 +Mime-Version: 1.0 (Produced by PhpWiki 1.1.x) +Content-Type: application/x-phpwiki; + pagename=FullTextSearch; + flags=PAGE_LOCKED +Content-Transfer-Encoding: binary + + +---- +Search Again: + + diff --git a/pgsrc/LikePages b/pgsrc/LikePages new file mode 100644 index 000000000..b9a4ed6c8 --- /dev/null +++ b/pgsrc/LikePages @@ -0,0 +1,6 @@ +LikePages are pages which share an initial or final word with that page. + +See also Wiki:LikePages and MeatBall:LikePages. +---- + + diff --git a/pgsrc/MostPopular b/pgsrc/MostPopular index a682a3d23..ac5705cb0 100644 --- a/pgsrc/MostPopular +++ b/pgsrc/MostPopular @@ -1,4 +1,2 @@ -The 20 most popular pages of this wiki: -(hitcount, pagename) + -%%Mostpopular%% diff --git a/pgsrc/PhpWikiAdministration b/pgsrc/PhpWikiAdministration index a9ad62e75..7fe459478 100644 --- a/pgsrc/PhpWikiAdministration +++ b/pgsrc/PhpWikiAdministration @@ -14,7 +14,7 @@ They won't work unless you have set an admin username and password in the PhpWik ----------- ! Log In - __[Log In | phpwiki:?action=login]__ +__[Log In | phpwiki:?action=login]__ This allows you to login. While you are logged in you will be able to ''lock'', ''unlock'' and ''delete'' pages. 
diff --git a/pgsrc/RecentChanges b/pgsrc/RecentChanges new file mode 100644 index 000000000..70c52dbbb --- /dev/null +++ b/pgsrc/RecentChanges @@ -0,0 +1,16 @@ +From hostmaster@dairiki.org Sat Feb 10 21:28:19 2001 +Subject: RecentChanges +From: hostmaster@dairiki.org (PhpWiki) +Date: Fri, 31 Aug 2001 22:07:14 -0700 +Mime-Version: 1.0 +Content-Type: application/x-phpwiki; + pagename=RecentChanges; + flags=PAGE_LOCKED +Content-Transfer-Encoding: binary + +The most recently changed pages are listed below. + +Show changes for: [1 day|phpwiki:?days=1] | [3 days|phpwiki:?days=3] | [7 days|phpwiki:?days=7] | [30 days|phpwiki:?days=30] | [90 days|phpwiki:?days=90] | [...|phpwiki:?days=-1] +-------- + +------- diff --git a/pgsrc/ReleaseNotes b/pgsrc/ReleaseNotes index 9f1ca3766..c4dad0d6c 100644 --- a/pgsrc/ReleaseNotes +++ b/pgsrc/ReleaseNotes @@ -1,3 +1,13 @@ +Jeffs Hacks. + +* New database API. See lib/WikiDB.php and lib/WikiDB/backend.php for the bulk of the documentation on this. The new database can save multiple archived versions of each page. A mechanism for purging archived revisions is now in place as well. See the $!ExpireParams setting in index.php, and lib/!ArchiveCleaner.php for details. At this point, the DBA, MySQL and Postgres backends are functional. The DBA backend could use some performance optimization. The whole API is still subject to some change as I figure out a clean way to provide for a variety of (not all forseen) methods of indexing and searching the wiki. +* New WikiPlugin architecture. +* New template engine. This needs to be documented further, but, for now, see lib/Template.php. On second thought, don't (lib/Template.php desperately needs refactoring, still.) The basic advance is that PHP4's ob_*() functions can be used to capture output from eval()ed PHP code. This allows the templates to be PHP code... +* Edit preview button on the edit page. Now you can see what your edits look like before committing them. +* Reworked the style-sheets. 
I still need to clean this up some more. I would appreciate comments and bug reports on it. So far I've only tested the new style sheets with Netscape 4.77 and Mozilla 0.9.3 (both running under Linux.) +* Expanded syntax for text search, now searches like "wiki or web -page" (or "(wiki or web) and not page") will do more-or-less what they look like they'll do. +* Security Issues: PhpWiki will now work with PHP's register_globals set to off. + PhpWiki 1.3.x development branch. * Pagenames in PATH_INFO. diff --git a/pgsrc/TitleSearch b/pgsrc/TitleSearch new file mode 100644 index 000000000..7f3c4a9f3 --- /dev/null +++ b/pgsrc/TitleSearch @@ -0,0 +1,15 @@ +From hostmaster@dairiki.org Sat Feb 10 21:28:19 2001 +Subject: TitleSearch +From: hostmaster@dairiki.org (PhpWiki) +Date: Wed, 12 Sep 2001 13:47:25 -0700 +Mime-Version: 1.0 (Produced by PhpWiki 1.1.x) +Content-Type: application/x-phpwiki; + pagename=TitleSearch; + flags=PAGE_LOCKED +Content-Transfer-Encoding: binary + + +---- +Search Again: + + diff --git a/pgsrc/WikiPlugin b/pgsrc/WikiPlugin new file mode 100644 index 000000000..2c787ecee --- /dev/null +++ b/pgsrc/WikiPlugin @@ -0,0 +1,48 @@ +The latest hacks include support for !WikiPlugins. + +!WikiPlugins allow one to easily add new types of dynamic content (as well as other functionality) +to wiki pages within PhpWiki. In this very wiki, the RecentChanges, BackLinks, +LikePages and DebugInfo pages are all implemented using plugins. +I expect that the search result pages, as well as much PhpWikiAdministration will +soon be implemented via plugins as well. (I think the oh-so-ugly +[MagicPhpWikiURLs] can be replaced by plugins, too.) + +!Example + +Currently, one invokes a plugin by putting something like: + + + +into a regular wiki-page. That particular example produces as list +of pages which link to the current page. Here it is: + + + +(This is great for Category and Topic pages. 
You can use this to get +an automatic in-line listing of pages in the Category or Topic.) + +!Details + +(This is all subject to change.) + +Plugins can take certain named arguments (most do). +The values of these arguments can be determined four different ways. +In order of precedence: +# The plugin invocation can specify the value for an argument, like so: +;;: +# The argument can be specified via an HTTP query argument. This doesn't happen (is not allowed) unless the argument is mentioned in the plugin invocation: +;;: +# Default values specified in the plugin invocation: +;;: +# The plugin must supply default values for each argument it uses. (The BackLinks plugin uses +the current page as the default value for the ''page'' argument. + + +!More Ideas for Plugins +* Search +* MostPopular +* WantedPages, OrphanPages, other various indexing schemes. +* Diff, PageHistory +* TitleSearch: LikePages (match_head, match_tail). +* Redirect plugin -- +* Calendar diff --git a/phpwiki-heavy.css b/phpwiki-heavy.css new file mode 100644 index 000000000..b682d9e52 --- /dev/null +++ b/phpwiki-heavy.css @@ -0,0 +1,32 @@ +/* + * phpwiki-heavy.css + * + * This is stuff which should be in phpwiki.css, but which breaks NS4. + */ + +/* Hide the elements we put in just to get around NS4 bugs. */ +.ns4bug, DIV.br { display: none; } + +/* This makes spacings in NS4 too big. */ +.toolbar, DIV.toolbar { margin: 0.5ex 0ex; } + +/* This break NS4, but is necessary for IE4. */ +DIV.wikitext { width: auto; } + +/* Make Wikilinks inside tags larger. 
*/ +B .wiki, STRONG .wiki, +B .wikipage, STRONG .wikipage +{ font-size: larger; } + +/* Make wikiaction links look like buttons */ +A.wikiaction, A.wikiadmin, INPUT.button +{ + border-style: outset; + border-width: thin; + color: #006; + padding-top: 0ex; + padding-bottom: 0ex; + padding-left: 0.2em; + padding-right: 0.2em; +} + diff --git a/phpwiki.css b/phpwiki.css index e06795ec2..6e39ba829 100644 --- a/phpwiki.css +++ b/phpwiki.css @@ -1,136 +1,189 @@ /* Classes: - wikitext + DIV.wikitext - the transformed wiki page text. + + A.wiki - link to page in wiki. + A.named-wiki - a named link to page in wiki (from e.g. [name|WikiPage]). + A.interwiki - link to page in another wiki + SPAN.wikipage - page name within interwiki link. + A.named-interwiki - link to page in another wiki + A.url - link to external URL from wiki page. + A.named-url - link to external URL from wiki page. + + .wikiunknown A, .wikiunknown U + .named-wikiunknown A, .named-wikiunknown U - wikiword - A wiki page name - rawurl - A raw url (like http://some.where.com/howdy.jpg) - interwiki - An interwiki name + A.wikiaction + A.wikiadmin + .wikiunsafe + + A.backlinks - linkurl - A link to an external URL - interwikilink - A link to an external wiki - wikilink = A link to an existing page - wikiunknown = A link to a non-existing page - wikiaction - A link which to an action (edit, diff, info...) - wikiadmin - A link to an admin action (lock, remove ...) - wikiunsafe - A link to an admin action which is located on an unlocked page. +TODO: Get rid of tables in wikiaction forms. */ -TABLE.outer { background-color: black; } -TABLE.inner { background-color: white; } +/* NS4 doesn't grok @import. This allows us to put things which break NS4 + * in another file. + */ +@import url(phpwiki-heavy.css); + +BODY { background: ivory; } + +BODY { font-family: arial, helvetica, sans-serif; } /* - * WikiWords in sans-serif + * NS4, defaults from BODY don't always propagate correctly. 
+ * So we need this: + */ +.wikitext, .toolbar, P, TD { font-family: arial, helvetica, sans-serif; } + +INPUT.button { font-family: arial, helvetica, sans-serif; } + +.wikitext PRE { font-family: monospace; } + +DIV.wikitext { + background: white; + border: thin; + border-color: black; + border-style: solid; + padding-left: 0.8em; + padding-right: 0.8em; + padding-top: 0px; + padding-bottom: 0px; + margin: 0.5ex 0px; + /* This breaks Netscape 4: (display does not go full width). + width: auto; */ -.wikiword -{ - font-family: avantgarde, helvetica, sans-serif; -} -PRE .wikiword, -TT .wikiword -{ - font-family: monospace; -} -H1 .wikiword -{ - font-size: large; -} -SMALL .wikiword -{ - font-size: small; -} - -.interwiki -{ - font-family: zapf chancery, cursive; + clear: both; } +INPUT.wikitext { margin:0px; } +DIV.toolbar { margin: 1ex 0ex; } /* - * Raw URLS in smaller font + * This is a kluge for NS4 which doesn't honor the clear: settings on + * .tool-left and .tool-right. + * + * Putting in explicit
    messes up the formatting in + * other browsers. Instead we'll put in a: + * + *


    + * + * The clear:both on DIV.br seems to work. And we'll disable the
    altogether (in + * CSS aware browsers) by setting display:none. + * + * Actually, I haven't yet found a browser which doesn't put a line break + * between successive
    's. This makes the
    completely + * unnecessary. */ -.rawurl -{ - font-family: serif; - font-size: smaller; +DIV.br { clear:both; line-height: 0px; } +.ignore { display: none; } + +DIV.errors { + background: #eee; + border: medium; + border-color: red; + border-style: solid; + padding-left: 0.8em; + padding-right: 0.8em; + padding-top: 0px; + padding-bottom: 0px; + margin: 1em; + /* This breaks Netscape 4: (display does not go full width). + width: auto; + */ + clear: both; } +.errors H4 { + color:red; + text-decoration: underline; + margin:0px; +} -/* - * No underline for wikilinks. - */ -.wikilink, -.wikiunknown, -.wikiunknown U, -.wikiaction, -.wikiadmin, -.interwikilink, -.footnote A, -.footnote-rev -{ - text-decoration: none; - /* color: #600; */ +P.error { + font-size: smaller; + font-family: monospace; + margin:0px; } -.wikiunknown, -.wikiunknown U +.error UL { - color: #600; + font-size: smaller; + font-family: monospace; } - /* - * Different backgrounds depening on link type. + * Style for
    s in wiki markup. */ -/* -.wikilink -{ - background-color: #ddc; +.wikitext HR { + background: #666; + height: 1px; + width: 90%; + margin-left:auto; + margin-right:auto; + align:center; /* for NS4 */ } + + +/* + * Link styles + */ +/* Wiki Links */ +A.wiki { text-decoration: none; } +.wiki { font-weight: bold; } +/* This is obscene in NS4 +B .wiki, STRONG .wiki { font-size: larger; } */ -.wikiaction, -.wikiaction INPUT, -.wikiaction TABLE -{ - background-color: #ddf; -} -.wikiadmin, -.wikiadmin INPUT, -.wikiadmin TABLE -{ - background-color: #fdd; -} -.wikiunsafe -{ - background-color: #eee; -} -.wikilink:link -{ - /* color: #c00; */ -} -.wikilink:visited, -{ - /* color: #600; */ -} +/* Unknown links */ +.wikiunknown A, .named-wikiunknown A, .wikiunknown U +{ text-decoration: none; } + +.wikiunknown, .named-wikiunknown +{color: #600; } +.wikiunknown A, .named-wikiunknown +{ color: white; background-color: #600; } + + +/* Interwiki links */ +A.interwiki { text-decoration: none; } +.wikipage { font-weight: bold; } + +.interwiki, +I .interwiki .wikipage, EM .interwiki .wikipage +{ font-style: oblique; } +.interwiki .wikipage, +I .interwiki, EM .interwiki +{ font-style: normal; } /* - * Special colors for the '?' after unknown wikiwords. + * wikiaction, wikiadmin, wikiunsafe: */ -A.wikiunknown -{ - color: #fffff0; - background-color: #663333; - font-family: avantgarde, helvetica, sans-serif; - text-decoration: none; -} -PRE A.wikiunknown, -TT A.wikiunknown +A.wikiaction, A.wikiadmin { text-decoration: none; } +A.wikiaction, .wikiaction TABLE, SPAN.wikiaction { background-color: #ddd; } +A.wikiadmin, .wikiadmin TABLE { background-color: #fdd; } +.wikiunsafe { background-color: #ccc; } + +/* + * Put a border around wikiaction forms: + * This doesn't work for NS4. 
+ */ +.wikiaction TABLE, .wikiadmin TABLE { - font-family: monospace; + border-style: ridge; + border-width: medium; } +.wikiaction TABLE { border-color: #9cf; } +.wikiadmin TABLE { border-color: #f99; } + +/* Backlinks */ +A.backlinks { color: #006; } +/* Make the textarea on the edit page full width */ +TEXTAREA.wikiedit { width: 100%; margin-top: 1ex; } /* For emacs users * diff --git a/schemas/schema.mysql b/schemas/schema.mysql index 1dbe2e9d0..5544b2aed 100644 --- a/schemas/schema.mysql +++ b/schemas/schema.mysql @@ -1,60 +1,47 @@ --- $Id: schema.mysql,v 1.6 2001-07-17 15:58:28 dairiki Exp $ +-- $Id: schema.mysql,v 1.7 2001-09-18 19:16:23 dairiki Exp $ + +drop table if exists page; +CREATE TABLE page ( + id INT NOT NULL, + pagename VARCHAR(100) BINARY NOT NULL, + hits INT NOT NULL DEFAULT 0, + pagedata MEDIUMTEXT NOT NULL DEFAULT '', + PRIMARY KEY (id), + UNIQUE KEY (pagename) +); + +drop table if exists version; +CREATE TABLE version ( + id INT NOT NULL, + version INT NOT NULL, + mtime INT NOT NULL, + minor_edit TINYINT DEFAULT 0, + content MEDIUMTEXT NOT NULL DEFAULT '', + versiondata MEDIUMTEXT NOT NULL DEFAULT '', + PRIMARY KEY (id,version), + INDEX (mtime) +); + +drop table if exists recent; +CREATE TABLE recent ( + id INT NOT NULL, + latestversion INT, + latestmajor INT, + latestminor INT, + PRIMARY KEY (id) +); + +drop table if exists nonempty; +CREATE TABLE nonempty ( + id INT NOT NULL, + PRIMARY KEY (id) +); + +drop table if exists link; +CREATE TABLE link ( + linkfrom INT NOT NULL, + linkto INT NOT NULL, + INDEX (linkfrom), + INDEX (linkto) +); -drop table if exists phpwiki_pages; -drop table if exists phpwiki_archive; -drop table if exists phpwiki_links; -drop table if exists phpwiki_hitcount; -drop table if exists phpwiki_score; -drop table if exists phpwiki_hottopics; - - -CREATE TABLE phpwiki_pages ( - pagename VARCHAR(100) NOT NULL, - version INT NOT NULL DEFAULT 1, - flags INT NOT NULL DEFAULT 0, - author VARCHAR(100), - lastmodified INT NOT 
NULL, - created INT NOT NULL, - content MEDIUMTEXT NOT NULL, - refs TEXT, - PRIMARY KEY (pagename) - ); - -CREATE TABLE phpwiki_archive ( - pagename VARCHAR(100) NOT NULL, - version INT NOT NULL DEFAULT 1, - flags INT NOT NULL DEFAULT 0, - author VARCHAR(100), - lastmodified INT NOT NULL, - created INT NOT NULL, - content MEDIUMTEXT NOT NULL, - refs TEXT, - PRIMARY KEY (pagename, version) - ); - -CREATE TABLE phpwiki_links ( - frompage VARCHAR(100) NOT NULL, - topage VARCHAR(100) NOT NULL, - PRIMARY KEY (frompage, topage) - ); - -CREATE TABLE phpwiki_hitcount ( - pagename VARCHAR(100) NOT NULL, - hits INT NOT NULL DEFAULT 0, - PRIMARY KEY (pagename) - ); - -CREATE TABLE phpwiki_score ( - pagename VARCHAR(100) NOT NULL, - score INT NOT NULL DEFAULT 0, - PRIMARY KEY (pagename) - ); - - --- tables below are not yet used - -CREATE TABLE phpwiki_hottopics ( - pagename VARCHAR(100) NOT NULL, - lastmodified INT NOT NULL, - PRIMARY KEY (pagename, lastmodified) - ); diff --git a/schemas/schema.psql b/schemas/schema.psql index c7749d511..d1892af63 100644 --- a/schemas/schema.psql +++ b/schemas/schema.psql @@ -1,65 +1,126 @@ --- $Id: schema.psql,v 1.8 2001-07-18 04:59:47 uckelman Exp $ - -drop table phpwiki_pages; -drop table phpwiki_archive; -drop table phpwiki_links; -drop table phpwiki_hottopics; -drop table phpwiki_hitcount; -drop table phpwiki_score; - -CREATE TABLE phpwiki_pages ( - pagename VARCHAR(100) NOT NULL, - version INT NOT NULL DEFAULT 1, - flags INT NOT NULL DEFAULT 0, - author VARCHAR(100), - lastmodified INT NOT NULL, - created INT NOT NULL, - content TEXT NOT NULL, - refs TEXT, - PRIMARY KEY (pagename) - ); - -CREATE TABLE phpwiki_archive ( - pagename VARCHAR(100) NOT NULL, - version INT NOT NULL DEFAULT 1, - flags INT NOT NULL DEFAULT 0, - author VARCHAR(100), - lastmodified INT NOT NULL, - created INT NOT NULL, - content TEXT NOT NULL, - refs TEXT, - PRIMARY KEY (pagename, version) - ); - -CREATE TABLE phpwiki_links ( - frompage VARCHAR(100) NOT NULL, - 
topage VARCHAR(100) NOT NULL, - PRIMARY KEY (frompage, topage) - ); - -CREATE TABLE phpwiki_hottopics ( - pagename VARCHAR(100) NOT NULL, - lastmodified INT NOT NULL, - PRIMARY KEY (pagename, lastmodified) - ); - -CREATE TABLE phpwiki_hitcount ( - pagename VARCHAR(100) NOT NULL, - hits INT NOT NULL DEFAULT 0, - PRIMARY KEY (pagename) - ); - -CREATE TABLE phpwiki_score ( - pagename VARCHAR(100) NOT NULL, - score INT NOT NULL DEFAULT 0, - PRIMARY KEY (pagename) - ); - - -GRANT ALL ON phpwiki_pages TO nobody; -GRANT ALL ON phpwiki_archive TO nobody; -GRANT ALL ON phpwiki_links TO nobody; -GRANT ALL ON phpwiki_hottopics TO nobody; -GRANT ALL ON phpwiki_hitcount TO nobody; -GRANT ALL ON phpwiki_score TO nobody; +-- $Id: schema.psql,v 1.9 2001-09-18 19:16:23 dairiki Exp $ +\set QUIET + + +--================================================================ +-- Prefix for table names. +-- +-- You should set this to the same value you specify for +-- $DBParams['prefix'] in index.php. + +\set prefix '' + +--================================================================ +-- Which postgres user gets access to the tables? +-- +-- You should set this to the name of the postgres +-- user who will be accessing the tables. +-- +-- Commonly, connections from php are made under +-- the user name of 'nobody' or 'apache'. + +\set httpd_user 'apache' + +--================================================================ +-- +-- Don't modify below this point unless you know what you are doing. +-- +--================================================================ + +\set qprefix '\'' :prefix '\'' +\set qhttp_user '\'' :httpd_user '\'' +\echo Initializing PhpWiki tables with: +\echo ' prefix = ' :qprefix +\echo ' httpd_user = ' :qhttp_user +\echo +\echo 'Expect some \'Relation \'*\' does not exists\' errors unless you are' +\echo 'overwriting existing tables.' 
+ +\set page_tbl :prefix 'page' +\set page_id :prefix 'page_id' +\set page_nm :prefix 'page_nm' + +\set version_tbl :prefix 'version' +\set vers_id :prefix 'vers_id' +\set vers_mtime :prefix 'vers_mtime' + +\set recent_tbl :prefix 'recent' +\set recent_id :prefix 'recent_id' + +\set nonempty_tbl :prefix 'nonempty' +\set nonmt_id :prefix 'nonmt_id' + +\set link_tbl :prefix 'link' +\set link_from :prefix 'link_from' +\set link_to :prefix 'link_to' + +\echo Dropping :page_tbl +DROP TABLE :page_tbl; +\echo Creating :page_tbl +CREATE TABLE :page_tbl ( + id INT NOT NULL, + pagename VARCHAR(100) NOT NULL, + hits INT NOT NULL DEFAULT 0, + pagedata TEXT NOT NULL DEFAULT '' +); +CREATE UNIQUE INDEX :page_id + ON :page_tbl (id); +CREATE UNIQUE INDEX :page_nm + ON :page_tbl (pagename); + +\echo Dropping :version_tbl +DROP TABLE :version_tbl; +\echo Creating :version_tbl +CREATE TABLE :version_tbl ( + id INT NOT NULL, + version INT NOT NULL, + mtime INT NOT NULL, +--FIXME: should use boolean, but that returns 't' or 'f'. not 0 or 1. 
+ minor_edit INT2 DEFAULT 0, + content TEXT NOT NULL DEFAULT '', + versiondata TEXT NOT NULL DEFAULT '' +); +CREATE UNIQUE INDEX :vers_id + ON :version_tbl (id,version); +CREATE INDEX :vers_mtime + ON :version_tbl (mtime); + +\echo Dropping :recent_tbl +DROP TABLE :recent_tbl; +\echo Creating :recent_tbl +CREATE TABLE :recent_tbl ( + id INT NOT NULL, + latestversion INT, + latestmajor INT, + latestminor INT +); +CREATE UNIQUE INDEX :recent_id + ON :recent_tbl (id); + + +\echo Dropping :nonempty_tbl +DROP TABLE :nonempty_tbl; +\echo Creating :nonempty_tbl +CREATE TABLE :nonempty_tbl ( + id INT NOT NULL +); +CREATE UNIQUE INDEX :nonmt_id + ON :nonempty_tbl (id); + +\echo Dropping :link_tbl +DROP TABLE :link_tbl; +\echo Creating :link_tbl +CREATE TABLE :link_tbl ( + linkfrom INT NOT NULL, + linkto INT NOT NULL +); +CREATE INDEX :link_from ON :link_tbl (linkfrom); +CREATE INDEX :link_to ON :link_tbl (linkto); + + +GRANT ALL ON :page_tbl TO :httpd_user; +GRANT ALL ON :version_tbl TO :httpd_user; +GRANT ALL ON :recent_tbl TO :httpd_user; +GRANT ALL ON :nonempty_tbl TO :httpd_user; +GRANT ALL ON :link_tbl TO :httpd_user; diff --git a/templates/README b/templates/README index f1203f737..5e0d8b38b 100644 --- a/templates/README +++ b/templates/README @@ -1,4 +1,4 @@ -$Id: README,v 1.6 2001-04-06 18:21:37 wainstead Exp $ +$Id: README,v 1.7 2001-09-18 19:16:24 dairiki Exp $ PhpWiki distinguishes between three different templates: @@ -17,6 +17,17 @@ PhpWiki distinguishes between three different templates: The location of the template files is defined in lib/config.php The specification is relative to the index.php script. + +========= +Tue Sep 18 12:00:12 PDT 2001 Jeff Dairiki + +The rest of this file is currently obsolete, and should be ignored. +The template situation is in a state of flux, and will probably be +changed again very soon. See lib/Template.php for more, but mostly +just "stay tuned" for details. 
+ +========= + Template files are regular HTML files, which contain special placeholders. Placeholders are surrounded by '###'. You must not use '###' for other purposes. diff --git a/templates/browse.html b/templates/browse.html index 93d612304..f166e7544 100644 --- a/templates/browse.html +++ b/templates/browse.html @@ -1,58 +1,129 @@ - + -###ROBOTS_META### - -###SPLIT_PAGE### - + + +${SPLIT_PAGE} + + - -

    [phpwiki] -###PAGE###

    - -###IF:!ANONYMOUS### -

    You are logged in as ###USERID###. -[Log Out]

    -###IF:ADMIN### -###IF LOCK###[Unlock page] -###IF !LOCK###[Lock page] -- - -[Remove page] -###ENDIF:ADMIN### -
    -###ENDIF:!ANONYMOUS### - -###IF:ANONYMOUS### -If you would like to, you may -log in -(use any WikiWord as a user ID, any password.)

    -###ENDIF:ANONYMOUS### - - - -
    - -
    ###CONTENT###
    -
    -###IF ADMIN###Edit Revision ###VERSION### -###IF:!ADMIN### -###IF LOCK###Page locked, revision ###VERSION### -###IF !LOCK###Edit Revision ###VERSION### -###ENDIF:!ADMIN### -| Created ###LASTMODIFIED### -| Info -| Diff -
    FindPage -by browsing or searching - -
    -###RELATEDPAGES### + +

    +
    [phpwiki]
    + + + +

    +

    + +

    Note: You are viewing an old revision of this page. + Click here to view the current version.

    +
    + +
    ${CONTENT}
    +
    +
    + + + + +
    + + Last edited on ${LASTMODIFIED} + + Version ${VERSION}, saved on ${LASTMODIFIED} + + + + | + + + | +
    + + + +
    + get('locked') && !$user->is_admin()) { ?> + Page locked + + + Edit + + Edit old revision + + + is_admin()) { ?> + get('locked')) { ?> + | Unlock page + + | Lock page + + | Remove page + + | History + + | Diff + + | Diff + + + is_authenticated()) { ?> + You are signed in as + | SignOut + + SignIn + +
    + +${RELATEDPAGES} +*/ ?> +flushPostponedErrors(); +$errmsgs = ob_get_contents(); +ob_end_clean(); +if ($errmsgs) + echo "

    PHP Warnings

    $errmsgs
    "; +?> +
    + + +
    +
    diff --git a/templates/editpage.html b/templates/editpage.html index 7416580f6..6f661a2b5 100644 --- a/templates/editpage.html +++ b/templates/editpage.html @@ -1,58 +1,112 @@ - + + -###ROBOTS_META### - -Edit: ###SPLIT_PAGE### - + + + +Edit: ${SPLIT_PAGE} - -
    -

    [phpwiki] -Edit ###PAGE### -

    + +

    +
    [phpwiki]
    +Edit ${PAGE} +

    -###IF !CURRENT###Editing old revision ###VERSION###. Saving this page will replace the current version with this text. + +

    Preview only! Changes not saved.

    +
    ${PREVIEW_CONTENT}
    +
    + - -
    -###IF:MINOR_EDIT_CHECKBOX### - ###MINOR_EDIT_CHECKBOX### This is a minor change, don't archive old version. -
    -###ENDIF:MINOR_EDIT_CHECKBOX### - -I can't type tabs. -Please ConvertSpacesToTabs -for me when I save. -

    You can change the size of the editing area. -See UserPreferences.

    -

    GoodStyle tips for editing.

    + +

    Warning: You are editing an old revision. + Saving this page will overwrite the current version.

    +
    + + + + + + +
    + Summary: +
    + + + + +
    + + This is a minor change. + + + | + + is_authenticated()) { ?> + You are signed in as + + Author will be logged as ${USERID}. + +
    +

    + You can change the size of the editing area. + See . + Also see tips for editing. +


    -Emphasis: '' for italics, __ for bold, ''__ for both -
    Lists: * for bullet lists, # for numbered lists, ''; term : definition'' for definition lists -
    References: JoinCapitalizedWords or use square brackets for a [page link] or URL [http://cool.wiki.int/]. -
    Footnotes: Use [1],[2],[3],... -
    Preventing linking: Avoid linking with "!": -!DoNotHyperlink, name links like [[text | URL] (double up on the "[") -
    Misc:"!", "!!", "!!!" make headings, -"%%%" makes a linebreak, "- - - -" makes a horizontal rule -
    more on -TextFormattingRules +Emphasis: + '' for italics, + __ for bold, + ''__ for both +
    Lists: + * for bullet lists, + # for numbered lists, + ''; term : definition'' for definition lists +
    References: + JoinCapitalizedWords or use square brackets for a [page link] + or URL [http://cool.wiki.int/]. +
    Footnotes: + Use [1],[2],[3],... +
    Preventing linking: + Avoid linking with "!": !DoNotHyperlink, + name links like [[text | URL] (double up on the "[") +
    Misc: + "!", "!!", "!!!" make headings, + "%%%" makes a linebreak, + "- - - -" makes a horizontal rule +
    +more on
    - - + + +
    diff --git a/templates/message.html b/templates/message.html index 420718be1..05a73cd13 100644 --- a/templates/message.html +++ b/templates/message.html @@ -1,21 +1,28 @@ - + + -###ROBOTS_META### - -###SPLIT_PAGE### - + + +${TITLE} + +

    +
    [phpwiki]
    +${TITLE} +

    - -

    [phpwiki] -###PAGE###

    - -###CONTENT### +${CONTENT} -- 2.45.0