001/*
002    Licensed to the Apache Software Foundation (ASF) under one
003    or more contributor license agreements.  See the NOTICE file
004    distributed with this work for additional information
005    regarding copyright ownership.  The ASF licenses this file
006    to you under the Apache License, Version 2.0 (the
007    "License"); you may not use this file except in compliance
008    with the License.  You may obtain a copy of the License at
009
010       http://www.apache.org/licenses/LICENSE-2.0
011
012    Unless required by applicable law or agreed to in writing,
013    software distributed under the License is distributed on an
014    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
015    KIND, either express or implied.  See the License for the
016    specific language governing permissions and limitations
017    under the License.
018 */
019package org.apache.wiki.xmlrpc;
020
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Vector;

import org.apache.log4j.Logger;
import org.apache.wiki.LinkCollector;
import org.apache.wiki.WikiContext;
import org.apache.wiki.WikiEngine;
import org.apache.wiki.WikiPage;
import org.apache.wiki.attachment.Attachment;
import org.apache.wiki.auth.permissions.PagePermission;
import org.apache.wiki.auth.permissions.PermissionFactory;
import org.apache.wiki.util.TextUtil;
import org.apache.xmlrpc.XmlRpcException;
039
040/**
041 *  Provides handlers for all RPC routines.
042 *
043 *  @since 1.6.6
044 */
045// We could use WikiEngine directly, but because of introspection it would
046// show just too many methods to be safe.
047public class RPCHandler
048    extends AbstractRPCHandler
049{
050    private static Logger log = Logger.getLogger( RPCHandler.class );
051
052    /**
053     *  {@inheritDoc}
054     */
055    public void initialize( WikiContext ctx )
056    {
057        super.initialize( ctx );
058    }
059
060    /**
061     *  Converts Java string into RPC string.
062     */
063    private String toRPCString( String src )
064    {
065        return TextUtil.urlEncodeUTF8( src );
066    }
067
068    /**
069     *  Converts RPC string (UTF-8, url encoded) into Java string.
070     */
071    private String fromRPCString( String src )
072    {
073        return TextUtil.urlDecodeUTF8( src );
074    }
075
076    /**
077     *  Transforms a Java string into UTF-8.
078     */
079    private byte[] toRPCBase64( String src )
080    {
081        try
082        {
083            return src.getBytes("UTF-8");
084        }
085        catch( UnsupportedEncodingException e )
086        {
087            //
088            //  You shouldn't be running JSPWiki on a platform that does not
089            //  use UTF-8.  We revert to platform default, so that the other
090            //  end might have a chance of getting something.
091            //
092            log.fatal("Platform does not support UTF-8, reverting to platform default");
093            return src.getBytes();
094        }
095    }
096
097    public String getApplicationName()
098    {
099        checkPermission( PagePermission.VIEW );
100        return toRPCString(m_engine.getApplicationName());
101    }
102
103    public Vector getAllPages()
104    {
105        checkPermission( PagePermission.VIEW );
106        Collection< WikiPage > pages = m_engine.getRecentChanges();
107        Vector<String> result = new Vector<String>();
108
109        for( WikiPage p : pages )
110        {
111            if( !(p instanceof Attachment) )
112            {
113                result.add( toRPCString(p.getName()) );
114            }
115        }
116
117        return result;
118    }
119
120    /**
121     *  Encodes a single wiki page info into a Hashtable.
122     */
123    protected Hashtable<String,Object> encodeWikiPage( WikiPage page )
124    {
125        Hashtable<String, Object> ht = new Hashtable<String, Object>();
126
127        ht.put( "name", toRPCString(page.getName()) );
128
129        Date d = page.getLastModified();
130
131        //
132        //  Here we reset the DST and TIMEZONE offsets of the
133        //  calendar.  Unfortunately, I haven't thought of a better
134        //  way to ensure that we're getting the proper date
135        //  from the XML-RPC thingy, except to manually adjust the date.
136        //
137
138        Calendar cal = Calendar.getInstance();
139        cal.setTime( d );
140        cal.add( Calendar.MILLISECOND,
141                 - (cal.get( Calendar.ZONE_OFFSET ) +
142                    (cal.getTimeZone().inDaylightTime( d ) ? cal.get( Calendar.DST_OFFSET ) : 0 )) );
143
144        ht.put( "lastModified", cal.getTime() );
145        ht.put( "version", page.getVersion() );
146
147        if( page.getAuthor() != null )
148        {
149            ht.put( "author", toRPCString(page.getAuthor()) );
150        }
151
152        return ht;
153    }
154
155    public Vector getRecentChanges( Date since )
156    {
157        checkPermission( PagePermission.VIEW );
158        Collection< WikiPage > pages = m_engine.getRecentChanges();
159        Vector<Hashtable<String, Object>> result = new Vector<Hashtable<String, Object>>();
160
161        Calendar cal = Calendar.getInstance();
162        cal.setTime( since );
163
164        //
165        //  Convert UTC to our time.
166        //
167        cal.add( Calendar.MILLISECOND,
168                 (cal.get( Calendar.ZONE_OFFSET ) +
169                  (cal.getTimeZone().inDaylightTime(since) ? cal.get( Calendar.DST_OFFSET ) : 0 ) ) );
170        since = cal.getTime();
171
172        for( WikiPage page : pages )
173        {
174            if( page.getLastModified().after( since ) && !(page instanceof Attachment) )
175            {
176                result.add( encodeWikiPage( page ) );
177            }
178        }
179
180        return result;
181    }
182
183    /**
184     *  Simple helper method, turns the incoming page name into
185     *  normal Java string, then checks page condition.
186     *
187     *  @param pagename Page Name as an RPC string (URL-encoded UTF-8)
188     *  @return Real page name, as Java string.
189     *  @throws XmlRpcException, if there is something wrong with the page.
190     */
191    private String parsePageCheckCondition( String pagename )
192        throws XmlRpcException
193    {
194        pagename = fromRPCString( pagename );
195
196        if( !m_engine.pageExists(pagename) )
197        {
198            throw new XmlRpcException( ERR_NOPAGE, "No such page '"+pagename+"' found, o master." );
199        }
200
201        WikiPage p = m_engine.getPage( pagename );
202
203        checkPermission( PermissionFactory.getPagePermission( p, PagePermission.VIEW_ACTION ) );
204
205        return pagename;
206    }
207
208    public Hashtable getPageInfo( String pagename )
209        throws XmlRpcException
210    {
211        pagename = parsePageCheckCondition( pagename );
212        return encodeWikiPage( m_engine.getPage(pagename) );
213    }
214
215    public Hashtable getPageInfoVersion( String pagename, int version )
216        throws XmlRpcException
217    {
218        pagename = parsePageCheckCondition( pagename );
219
220        return encodeWikiPage( m_engine.getPage( pagename, version ) );
221    }
222
223    public byte[] getPage( String pagename )
224        throws XmlRpcException
225    {
226        pagename = parsePageCheckCondition( pagename );
227
228        String text = m_engine.getPureText( pagename, -1 );
229
230        return toRPCBase64( text );
231    }
232
233    public byte[] getPageVersion( String pagename, int version )
234        throws XmlRpcException
235    {
236        pagename = parsePageCheckCondition( pagename );
237
238        return toRPCBase64( m_engine.getPureText( pagename, version ) );
239    }
240
241    public byte[] getPageHTML( String pagename )
242        throws XmlRpcException
243    {
244        pagename = parsePageCheckCondition( pagename );
245
246        return toRPCBase64( m_engine.getHTML( pagename ) );
247    }
248
249    public byte[] getPageHTMLVersion( String pagename, int version )
250        throws XmlRpcException
251    {
252        pagename = parsePageCheckCondition( pagename );
253
254        return toRPCBase64( m_engine.getHTML( pagename, version ) );
255    }
256
257    public Vector listLinks( String pagename )
258        throws XmlRpcException
259    {
260        pagename = parsePageCheckCondition( pagename );
261
262        WikiPage page = m_engine.getPage( pagename );
263        String pagedata = m_engine.getPureText( page );
264
265        LinkCollector localCollector = new LinkCollector();
266        LinkCollector extCollector   = new LinkCollector();
267        LinkCollector attCollector   = new LinkCollector();
268
269        WikiContext context = new WikiContext( m_engine, page );
270        context.setVariable( WikiEngine.PROP_REFSTYLE, "absolute" );
271
272        m_engine.textToHTML( context,
273                             pagedata,
274                             localCollector,
275                             extCollector,
276                             attCollector );
277
278        Vector<Hashtable<String, String>> result = new Vector<Hashtable<String, String>>();
279
280        //
281        //  Add local links.
282        //
283        for( Iterator< String > i = localCollector.getLinks().iterator(); i.hasNext(); )
284        {
285            String link = i.next();
286            Hashtable< String, String > ht = new Hashtable<String, String>();
287            ht.put( "page", toRPCString( link ) );
288            ht.put( "type", LINK_LOCAL );
289
290            //
291            //  FIXME: This is a kludge.  The link format should really be queried
292            //  from the TranslatorReader itself.  Also, the link format should probably
293            //  have information on whether the page exists or not.
294            //
295
296            //
297            //  FIXME: The current link collector interface is not very good, since
298            //  it causes this.
299            //
300
301            if( m_engine.pageExists(link) )
302            {
303                ht.put( "href", context.getURL(WikiContext.VIEW,link) );
304            }
305            else
306            {
307                ht.put( "href", context.getURL(WikiContext.EDIT,link) );
308            }
309
310            result.add( ht );
311        }
312
313        //
314        // Add links to inline attachments
315        //
316        for( Iterator< String > i = attCollector.getLinks().iterator(); i.hasNext(); )
317        {
318            String link = i.next();
319
320            Hashtable< String, String > ht = new Hashtable< String, String >();
321
322            ht.put( "page", toRPCString( link ) );
323            ht.put( "type", LINK_LOCAL );
324            ht.put( "href", context.getURL( WikiContext.ATTACH, link ) );
325
326            result.add( ht );
327        }
328
329        //
330        // External links don't need to be changed into XML-RPC strings,
331        // simply because URLs are by definition ASCII.
332        //
333
334        for( Iterator< String > i = extCollector.getLinks().iterator(); i.hasNext(); )
335        {
336            String link = i.next();
337
338            Hashtable< String, String > ht = new Hashtable< String, String >();
339
340            ht.put( "page", link );
341            ht.put( "type", LINK_EXTERNAL );
342            ht.put( "href", link );
343
344            result.add( ht );
345        }
346
347        return result;
348    }
349}