4 * Copyright 2004 $ThePhpWikiProgrammingTeam
6 * This file is part of PhpWiki.
8 * PhpWiki is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
13 * PhpWiki is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
18 * You should have received a copy of the GNU General Public License
19 * along with PhpWiki; if not, write to the Free Software
20 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 * CreateBib: Automatically create a BibTex file from page
28 * <<CreateBib pagename||=whatever >>
30 * @author: Lea Viljanen
33 class WikiPlugin_CreateBib
// NOTE(review): sampled view -- the base-class clause (presumably
// "extends WikiPlugin") and the getName() method header are not
// visible in this chunk; confirm against the full file.
// Localized plugin name, shown in plugin listings.
37 return _("CreateBib");
// Localized one-line description of what the plugin does.
40 function getDescription() {
41 return _("Automatically create a Bibtex file from linked pages");
// Declared plugin arguments with defaults; '[pagename]' presumably
// expands to the page the plugin is embedded in -- see run(), which
// resolves it relative to $basepage via WikiPageName.
44 function getDefaultArguments() {
45 return array( 'pagename' => '[pagename]', // The page from which the BibTex file is generated
// Collect the BibTeX entry lines from a page's content array.
// Capturing starts on a line beginning with $starttag (normally "@")
// and stops after a line beginning with $endtag (normally "}").
//
// Fixes the old TODO ("Have to include the $starttag and $endtag to
// the regexps...") by building the delimiter patterns from the
// arguments instead of the hard-coded '/^@/' and '/^\}/', and resets
// the stop flag so a second entry on the same page is captured in
// full rather than one line at a time.
//
// @param array  $content  lines of wiki page content
// @param string $starttag line prefix that opens an entry (e.g. "@")
// @param string $endtag   line prefix that closes an entry (e.g. "}")
// @return array captured entry lines (empty array when none found)
function extractBibTeX (&$content, $starttag, $endtag)
{
    $bib = array();
    $start = false;
    $stop = false;

    // Anchor the (now dynamic) delimiters at the beginning of a line;
    // preg_quote escapes regex metacharacters such as '}'.
    $startpat = '/^' . preg_quote($starttag, '/') . '/';
    $endpat   = '/^' . preg_quote($endtag, '/') . '/';

    for ($i = 0; $i < count($content); $i++) {
        // $starttag shows when to start capturing
        if (preg_match($startpat, $content[$i])) {
            $start = true;
        }
        // $endtag shows when to stop capturing
        else if (preg_match($endpat, $content[$i])) {
            $stop = true;
        }
        if ($start) {
            $bib[] = $content[$i];
            if ($stop) {
                // Reset BOTH flags so any later entry is captured too.
                $start = false;
                $stop = false;
            }
        }
    }
    return $bib;
}
// Extract linked article names from a straight bullet list.
// Matches lines of the form "* [WikiPageName] whatever" and returns
// the text between the first "[" and the last "]" on each such line.
//
// Fixes: $articles is now always initialized, so the function returns
// an empty array (instead of a possibly-undefined variable) when no
// line matches; the dead commented-out pattern variant was removed.
//
// @param array $content lines of wiki page content
// @return array list of extracted page names (possibly empty)
function extractArticles (&$content) {
    $articles = array();
    foreach ($content as $line) {
        // Should match "* [WikiPageName] whatever"
        if (preg_match('/^\s*\*\s+\[(.+)\]/', $line, $match)) {
            $articles[] = $match[1];
        }
    }
    return $articles;
}
// Dump the given wiki page into a zip archive named $filename.
//
// @param $thispage page object -- must provide get('mtime') and
//                  getName() (NOT a content array; see the caller
//                  note in run()).
// @param $filename target file name handed to ZipWriter
90 function dumpFile(&$thispage, $filename) {
// MailifyPage / ZipWriter / FilenameForPage come from lib/loadsave.php.
91 include_once("lib/loadsave.php");
92 $mailified = MailifyPage($thispage);
// Preserve the page's modification time; flag content as plain ASCII.
94 $attrib = array('mtime' => $thispage->get('mtime'), 'is_ascii' => 1);
96 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $filename);
// NOTE(review): this call is truncated in the sampled view; the
// remaining arguments (and presumably a $zip->finish()) are not
// visible here -- confirm against the full file.
97 $zip->addRegularFile( FilenameForPage($thispage->getName()),
// Plugin entry point: build a <pre> bibliography from the BibTeX
// fragments of every page linked (as a bullet list) from $pagename,
// and -- when a 'file' request argument is present -- dump the page
// to a server-side file.
//
// @param $dbi      wiki database handle
// @param $argstr   raw plugin argument string
// @param $request  current request object
// @param $basepage page the plugin is embedded in (for relative names)
103 function run($dbi, $argstr, $request, $basepage) {
// extract() brings the declared plugin arguments (e.g. $pagename)
// into local scope -- standard PhpWiki plugin idiom.
104 extract($this->getArgs($argstr, $request));
106 // Expand relative page names.
107 $page = new WikiPageName($pagename, $basepage);
108 $pagename = $page->name;
111 return $this->error(_("no page specified"));
114 // Get the links page contents
115 $page = $dbi->getPage($pagename);
116 $current = $page->getCurrentRevision();
117 $content = $current->getContent();
119 // Prepare the button to trigger dumping
// NOTE(review): "tube.bib" looks like a leftover hard-coded file name
// -- consider deriving it from $pagename.
120 $dump_url = $request->getURLtoSelf(array("file" => "tube.bib"));
// NOTE(review): $WikiTheme is a global; a "global $WikiTheme;"
// declaration is not visible in this sampled view -- confirm it
// exists earlier in the method.
122 $dump_button = $WikiTheme->makeButton("To File",
125 $html = HTML::div(array('class' => 'bib','align' => 'left'));
126 $html->pushContent($dump_button, ' ');
127 $list = HTML::pre(array('id'=>'biblist', 'class' => 'bib'));
129 // Let's find the subpages
130 if ($articles = $this->extractArticles($content)) {
131 foreach ($articles as $h) {
133 // Now let's get the bibtex information from that subpage
134 $subpage = $dbi->getPage($h);
135 $subversion = $subpage->getCurrentRevision();
136 $subcontent = $subversion->getContent();
// Delimiters: entries start at "@" and end after "}".
138 $bib = $this->extractBibTeX($subcontent, "@", "}");
140 // ...and finally just push the bibtex data to page
141 $foo = implode("\n", $bib);
142 $bar = $foo . "\n\n";
// HTML::raw: the BibTeX text is inserted unescaped into the <pre>.
143 $list->pushContent(HTML::raw($bar));
146 $html->pushContent($list);
148 if ($request->getArg('file')) {
149 // Yes, we want to dump this somewhere
150 // Get the contents of this page
151 $p = $dbi->getPage($pagename);
152 $c = $p->getCurrentRevision();
153 $pagedata = $c->getContent();
// NOTE(review): likely bug -- dumpFile() treats its first argument as
// a page object (it calls ->get('mtime') and ->getName()), but
// $pagedata here is the content returned by getContent(). Passing
// $p instead appears to be what was intended; confirm and fix.
154 $this->dumpFile($pagedata, $request->getArg('file'));
165 // c-hanging-comment-ender-p: nil
166 // indent-tabs-mode: nil