/*
    Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership.  The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing,
    software distributed under the License is distributed on an
    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    KIND, either express or implied.  See the License for the
    specific language governing permissions and limitations
    under the License.
 */
package org.apache.wiki.xmlrpc;

import java.io.UnsupportedEncodingException;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Vector;

import org.apache.log4j.Logger;
import org.apache.wiki.LinkCollector;
import org.apache.wiki.WikiContext;
import org.apache.wiki.WikiEngine;
import org.apache.wiki.WikiPage;
import org.apache.wiki.attachment.Attachment;
import org.apache.wiki.auth.permissions.PagePermission;
import org.apache.wiki.auth.permissions.PermissionFactory;
import org.apache.wiki.util.TextUtil;
import org.apache.xmlrpc.XmlRpcException;
/**
 *  Provides handlers for all RPC routines.
 *
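 *  <p>A minimal client-side sketch, assuming the handler is registered
 *  under the conventional {@code wiki.} prefix and the Apache XML-RPC 2.x
 *  client is on the classpath (the endpoint URL is illustrative):</p>
 *  <pre>
 *  XmlRpcClient client = new XmlRpcClient( "http://localhost:8080/JSPWiki/RPC2/" );
 *  String appName = (String) client.execute( "wiki.getApplicationName", new Vector() );
 *  </pre>
 *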
 *  @since 1.6.6
 */
// We could use WikiEngine directly, but because the XML-RPC library finds
// its methods through introspection, doing so would expose far too many
// methods to be safe.
public class RPCHandler
    extends AbstractRPCHandler
{
    private static final Logger log = Logger.getLogger( RPCHandler.class );

    /**
     *  {@inheritDoc}
     */
    public void initialize( WikiContext ctx )
    {
        super.initialize( ctx );
    }

    /**
     *  Converts a Java string into an RPC string.
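     *  The Wiki RPC API transports strings as URL-encoded UTF-8; for
     *  example (illustrative value), "Päivä" becomes "P%C3%A4iv%C3%A4".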
     */
    private String toRPCString( String src )
    {
        return TextUtil.urlEncodeUTF8( src );
    }

    /**
     *  Converts an RPC string (URL-encoded UTF-8) into a Java string.
     */
    private String fromRPCString( String src )
    {
        return TextUtil.urlDecodeUTF8( src );
    }

    /**
     *  Transforms a Java string into UTF-8 bytes.
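     *  The XML-RPC layer serializes the returned byte array as a
     *  &lt;base64&gt; element, which keeps page text 8-bit clean on the wire.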
     */
    private byte[] toRPCBase64( String src )
    {
        try
        {
            return src.getBytes("UTF-8");
        }
        catch( UnsupportedEncodingException e )
        {
            //
            //  Every compliant JVM is required to support UTF-8, so this
            //  fallback should never trigger.  If it somehow does, we revert
            //  to the platform default so that the other end at least has a
            //  chance of receiving something.
            //
            log.fatal("Platform does not support UTF-8, reverting to platform default");
            return src.getBytes();
        }
    }

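    /**
     *  Returns the name of this wiki application as an RPC string.
     */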
    public String getApplicationName()
    {
        checkPermission( PagePermission.VIEW );
        return toRPCString(m_engine.getApplicationName());
    }

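    /**
     *  Returns the names of all regular pages, attachments excluded,
     *  as RPC strings.
     */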
    public Vector getAllPages()
    {
        checkPermission( PagePermission.VIEW );
        Collection pages = m_engine.getRecentChanges();
        Vector<String> result = new Vector<String>();

        for( Iterator i = pages.iterator(); i.hasNext(); )
        {
            WikiPage p = (WikiPage) i.next();
            if( !(p instanceof Attachment) )
            {
                result.add( toRPCString(p.getName()) );
            }
        }

        return result;
    }

    /**
     *  Encodes a single wiki page's info into a Hashtable.
     */
    protected Hashtable<String,Object> encodeWikiPage( WikiPage page )
    {
        Hashtable<String, Object> ht = new Hashtable<String, Object>();

        ht.put( "name", toRPCString(page.getName()) );

        Date d = page.getLastModified();

        //
        //  Here we strip the zone and DST offsets out of the calendar, so
        //  that the date serializes as UTC.  XML-RPC's dateTime.iso8601
        //  carries no timezone information, so manually adjusting the date
        //  is, unfortunately, the only way we have found to make sure the
        //  client reads the proper instant.
        //

        Calendar cal = Calendar.getInstance();
        cal.setTime( d );
        cal.add( Calendar.MILLISECOND,
                 - (cal.get( Calendar.ZONE_OFFSET ) +
                    (cal.getTimeZone().inDaylightTime( d ) ? cal.get( Calendar.DST_OFFSET ) : 0 )) );
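
        //
        //  Worked example (assumed server zone): on an EET (UTC+2, no DST)
        //  server, an instant whose local wall clock reads 12:00 is shifted
        //  back two hours and serializes as 10:00, the UTC reading the
        //  client expects.
        //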

        ht.put( "lastModified", cal.getTime() );
        ht.put( "version", page.getVersion() );

        if( page.getAuthor() != null )
        {
            ht.put( "author", toRPCString(page.getAuthor()) );
        }

        return ht;
    }

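    /**
     *  Returns the pages, attachments excluded, that have changed since
     *  the given UTC timestamp, each encoded with encodeWikiPage().
     */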
    public Vector getRecentChanges( Date since )
    {
        checkPermission( PagePermission.VIEW );
        Collection pages = m_engine.getRecentChanges();
        Vector<Hashtable<String, Object>> result = new Vector<Hashtable<String, Object>>();

        Calendar cal = Calendar.getInstance();
        cal.setTime( since );

        //
        //  Convert the UTC timestamp the client sent into server-local time.
        //
        cal.add( Calendar.MILLISECOND,
                 (cal.get( Calendar.ZONE_OFFSET ) +
                  (cal.getTimeZone().inDaylightTime(since) ? cal.get( Calendar.DST_OFFSET ) : 0 ) ) );
        since = cal.getTime();
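
        //
        //  This is the inverse of the shift in encodeWikiPage(): a client
        //  sends 10:00 meaning UTC, but the XML-RPC layer parses it as
        //  10:00 local time, so on an EET (UTC+2, no DST) server we add
        //  two hours to recover the intended instant.
        //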

        for( Iterator i = pages.iterator(); i.hasNext(); )
        {
            WikiPage page = (WikiPage)i.next();

            if( page.getLastModified().after( since ) && !(page instanceof Attachment) )
            {
                result.add( encodeWikiPage( page ) );
            }
        }

        return result;
    }

    /**
     *  Simple helper method that turns the incoming page name into a
     *  normal Java string, then verifies that the page exists and that
     *  the caller is allowed to view it.
     *
     *  @param pagename Page name as an RPC string (URL-encoded UTF-8)
     *  @return Real page name, as a Java string.
     *  @throws XmlRpcException If the page does not exist or may not be viewed.
     */
    private String parsePageCheckCondition( String pagename )
        throws XmlRpcException
    {
        pagename = fromRPCString( pagename );

        if( !m_engine.pageExists(pagename) )
        {
            throw new XmlRpcException( ERR_NOPAGE, "No such page '"+pagename+"' found, o master." );
        }

        WikiPage p = m_engine.getPage( pagename );

        checkPermission( PermissionFactory.getPagePermission( p, PagePermission.VIEW_ACTION ) );

        return pagename;
    }

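    /**
     *  Returns the page info struct for the latest version of a page.
     */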
    public Hashtable getPageInfo( String pagename )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );
        return encodeWikiPage( m_engine.getPage(pagename) );
    }

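    /**
     *  Returns the page info struct for a specific version of a page.
     */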
    public Hashtable getPageInfoVersion( String pagename, int version )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );

        return encodeWikiPage( m_engine.getPage( pagename, version ) );
    }

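    /**
     *  Returns the raw wiki markup of the latest page version as UTF-8 bytes.
     */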
    public byte[] getPage( String pagename )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );

        String text = m_engine.getPureText( pagename, -1 );

        return toRPCBase64( text );
    }

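    /**
     *  Returns the raw wiki markup of a specific page version as UTF-8 bytes.
     */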
    public byte[] getPageVersion( String pagename, int version )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );

        return toRPCBase64( m_engine.getPureText( pagename, version ) );
    }

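    /**
     *  Returns the rendered HTML of the latest page version as UTF-8 bytes.
     */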
    public byte[] getPageHTML( String pagename )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );

        return toRPCBase64( m_engine.getHTML( pagename ) );
    }

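    /**
     *  Returns the rendered HTML of a specific page version as UTF-8 bytes.
     */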
    public byte[] getPageHTMLVersion( String pagename, int version )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );

        return toRPCBase64( m_engine.getHTML( pagename, version ) );
    }

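    /**
     *  Returns the local, attachment and external links found on a page.
     */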
    public Vector listLinks( String pagename )
        throws XmlRpcException
    {
        pagename = parsePageCheckCondition( pagename );

        WikiPage page = m_engine.getPage( pagename );
        String pagedata = m_engine.getPureText( page );

        LinkCollector localCollector = new LinkCollector();
        LinkCollector extCollector   = new LinkCollector();
        LinkCollector attCollector   = new LinkCollector();

        WikiContext context = new WikiContext( m_engine, page );
        context.setVariable( WikiEngine.PROP_REFSTYLE, "absolute" );

        m_engine.textToHTML( context,
                             pagedata,
                             localCollector,
                             extCollector,
                             attCollector );

        Vector<Hashtable<String, String>> result = new Vector<Hashtable<String, String>>();
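
        //
        //  Each element of the result is a struct like (values illustrative):
        //    { page = "Main", type = "local",
        //      href = "http://example.com/wiki/Wiki.jsp?page=Main" }
        //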

        //
        //  Add local links.
        //
        for( Iterator< String > i = localCollector.getLinks().iterator(); i.hasNext(); )
        {
            String link = i.next();
            Hashtable< String, String > ht = new Hashtable<String, String>();
            ht.put( "page", toRPCString( link ) );
            ht.put( "type", LINK_LOCAL );

            //
            //  FIXME: This is a kludge.  The link format should really be
            //  queried from the TranslatorReader itself, and it should
            //  probably also carry information on whether the page exists.
            //  The current link collector interface is not very good either,
            //  since it forces this workaround.
            //

            if( m_engine.pageExists(link) )
            {
                ht.put( "href", context.getURL(WikiContext.VIEW,link) );
            }
            else
            {
                ht.put( "href", context.getURL(WikiContext.EDIT,link) );
            }

            result.add( ht );
        }

        //
        //  Add links to inline attachments.
        //
        for( Iterator< String > i = attCollector.getLinks().iterator(); i.hasNext(); )
        {
            String link = i.next();

            Hashtable< String, String > ht = new Hashtable< String, String >();

            ht.put( "page", toRPCString( link ) );
            ht.put( "type", LINK_LOCAL );
            ht.put( "href", context.getURL( WikiContext.ATTACH, link ) );

            result.add( ht );
        }

        //
        //  External links don't need to be converted into RPC strings,
        //  simply because URLs are ASCII by definition.
        //
        for( Iterator< String > i = extCollector.getLinks().iterator(); i.hasNext(); )
        {
            String link = i.next();

            Hashtable< String, String > ht = new Hashtable< String, String >();

            ht.put( "page", link );
            ht.put( "type", LINK_EXTERNAL );
            ht.put( "href", link );

            result.add( ht );
        }

        return result;
    }
}