4 * Copyright 2004 $ThePhpWikiProgrammingTeam
6 * This file is part of PhpWiki.
8 * PhpWiki is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
13 * PhpWiki is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
18 * You should have received a copy of the GNU General Public License along
19 * with PhpWiki; if not, write to the Free Software Foundation, Inc.,
20 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 * CreateBib: Automatically create a BibTex file from page
28 * <<CreateBib pagename||=whatever >>
30 * @author: Lea Viljanen
// Plugin class: builds a BibTeX listing (and an optional file dump) from
// the pages linked off a given wiki page.
// NOTE(review): this excerpt carries fused original line numbers and is
// missing interior lines (method headers/braces); code left byte-identical,
// comments only.
33 class WikiPlugin_CreateBib
// Plugin display name (body of getName(); header not visible in excerpt).
38 return _("CreateBib");
// One-line description shown in plugin listings.
41 function getDescription()
43 return _("Automatically create a Bibtex file from linked pages");
// Default plugin arguments; 'pagename' defaults to the current page
// via the '[pagename]' placeholder.
46 function getDefaultArguments()
48 return array('pagename' => '[pagename]', // The page from which the BibTex file is generated
52 // Have to include the $starttag and $endtag to the regexps...
// Scan $content line-by-line and collect the lines belonging to BibTeX
// entries. Despite the parameters, $starttag/$endtag are not used in the
// visible regexps: an entry start is a line beginning with '@' and the
// end a line beginning with '}' (hence the TODO comment above).
// NOTE(review): initialization of $start/$stop/$bib and the return
// statement fall outside this excerpt — presumably returns $bib; verify
// against the full file.
53 function extractBibTeX(&$content, $starttag, $endtag)
59 for ($i = 0; $i < count($content); $i++) {
60 // $starttag shows when to start
61 if (preg_match('/^@/', $content[$i], $match)) {
63 } // $endtag shows when to stop
64 else if (preg_match('/^\}/', $content[$i], $match)) {
// Accumulate the current line while inside an entry; after the closing
// line has been collected, clear the start flag.
68 $bib[] = $content[$i];
69 if ($stop) $start = false;
75 // Extract article links. Current markup is by * characters...
76 // Assume straight list
// Pull wiki page names out of bulleted list items of the form
// "* [WikiPageName] whatever"; the bracketed text is captured.
// NOTE(review): the return statement is not visible in this excerpt —
// presumably returns $articles (run() treats the result as an array).
77 function extractArticles(&$content)
80 for ($i = 0; $i < count($content); $i++) {
81 // Should match "* [WikiPageName] whatever"
82 //if (preg_match('/^\s*\*\s+(\[.+\])/',$content[$i],$match))
83 if (preg_match('/^\s*\*\s+\[(.+)\]/', $content[$i], $match)) {
84 $articles[] = $match[1];
// Serialize $thispage in mail format (loadsave.php's MailifyPage) and
// write it as an entry of a new zip archive created at $filename.
// NOTE(review): the tail of the $zip->addRegularFile() call and any
// closing/return lines are missing from this excerpt.
90 function dumpFile(&$thispage, $filename)
92 include_once 'lib/loadsave.php';
93 $mailified = MailifyPage($thispage);
// Entry attributes: page mtime, flagged as ASCII content.
95 $attrib = array('mtime' => $thispage->get('mtime'), 'is_ascii' => 1);
97 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $filename);
98 $zip->addRegularFile(FilenameForPage($thispage->getName()),
// Main plugin entry point: renders a <pre> listing of BibTeX entries
// gathered from the pages linked on $pagename, plus a button that
// re-invokes this page with a 'file' argument to trigger a file dump.
// NOTE(review): several interior lines (pagename validation condition,
// the rest of makeButton's arguments, loop/brace closers, final return)
// are missing from this excerpt.
104 function run($dbi, $argstr, $request, $basepage)
106 extract($this->getArgs($argstr, $request));
108 // Expand relative page names.
109 $page = new WikiPageName($pagename, $basepage);
110 $pagename = $page->name;
// Error path when no pagename was supplied. NOTE(review): the quote
// characters in this message ('ā') look mis-encoded — probably were
// curly quotes around %s in the original; verify against upstream.
113 return $this->error(sprintf(_("A required argument ā%sā is missing."), 'pagename'));
116 // Get the links page contents
117 $page = $dbi->getPage($pagename);
118 $current = $page->getCurrentRevision();
119 $content = $current->getContent();
121 // Prepare the button to trigger dumping
// The dump URL points back at this request with a hard-coded file name.
122 $dump_url = $request->getURLtoSelf(array("file" => "tube.bib"));
124 $dump_button = $WikiTheme->makeButton("To File",
// Container markup: a div holding the button and the <pre> BibTeX list.
127 $html = HTML::div(array('class' => 'bib', 'align' => 'left'));
128 $html->pushContent($dump_button, ' ');
129 $list = HTML::pre(array('id' => 'biblist', 'class' => 'bib'));
131 // Let's find the subpages
132 if ($articles = $this->extractArticles($content)) {
133 foreach ($articles as $h) {
135 // Now let's get the bibtex information from that subpage
136 $subpage = $dbi->getPage($h);
137 $subversion = $subpage->getCurrentRevision();
138 $subcontent = $subversion->getContent();
// Note: the "@"/"}" tag arguments are currently ignored by
// extractBibTeX's visible regexps.
140 $bib = $this->extractBibTeX($subcontent, "@", "}");
142 // ...and finally just push the bibtex data to page
143 $foo = implode("\n", $bib);
144 $bar = $foo . "\n\n";
145 $list->pushContent(HTML::raw($bar));
148 $html->pushContent($list);
// A 'file' request argument means a dump was requested: fetch the page
// again and hand it to dumpFile(), returning its result directly.
150 if ($request->getArg('file')) {
151 // Yes, we want to dump this somewhere
152 // Get the contents of this page
153 $p = $dbi->getPage($pagename);
154 return $this->dumpFile($p, $request->getArg('file'));
165 // c-hanging-comment-ender-p: nil
166 // indent-tabs-mode: nil