/*
    Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership.  The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing,
    software distributed under the License is distributed on an
    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    KIND, either express or implied.  See the License for the
    specific language governing permissions and limitations
    under the License.
 */
package org.apache.wiki.render;

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.log4j.Logger;
import org.apache.wiki.StringTransmutator;
import org.apache.wiki.api.core.Attachment;
import org.apache.wiki.api.core.Context;
import org.apache.wiki.api.core.ContextEnum;
import org.apache.wiki.api.core.Engine;
import org.apache.wiki.api.core.Page;
import org.apache.wiki.api.exceptions.FilterException;
import org.apache.wiki.api.exceptions.ProviderException;
import org.apache.wiki.api.exceptions.WikiException;
import org.apache.wiki.api.providers.PageProvider;
import org.apache.wiki.api.spi.Wiki;
import org.apache.wiki.attachment.AttachmentManager;
import org.apache.wiki.event.WikiEvent;
import org.apache.wiki.event.WikiEventListener;
import org.apache.wiki.event.WikiEventManager;
import org.apache.wiki.event.WikiPageEvent;
import org.apache.wiki.filters.FilterManager;
import org.apache.wiki.pages.PageManager;
import org.apache.wiki.parser.JSPWikiMarkupParser;
import org.apache.wiki.parser.MarkupParser;
import org.apache.wiki.parser.WikiDocument;
import org.apache.wiki.references.ReferenceManager;
import org.apache.wiki.util.ClassUtil;
import org.apache.wiki.util.TextUtil;
import org.apache.wiki.variables.VariableManager;

import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.Constructor;
import java.util.Collection;
import java.util.Properties;


/**
 *  This class provides a facade towards the differing rendering routines.  Use the routines in this manager instead
 *  of the ones in Engine if you want to avoid side effects such as WikiFilters being applied.
 *  <p>
 *  This class also manages a rendering cache, i.e. documents are stored between calls. You may control the cache by
 *  tweaking the ehcache.xml file.
 *  <p>
 *
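 *  A minimal usage sketch (the page name is illustrative; assumes an already initialized {@code Engine}):
 *  <pre>{@code
 *  final Page page = engine.getManager( PageManager.class ).getPage( "Main", PageProvider.LATEST_VERSION );
 *  final Context context = Wiki.context().create( engine, page );
 *  final String html = engine.getManager( RenderingManager.class ).getHTML( context, page );
 *  }</pre>
 *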
 *  @since  2.4
 */
public class DefaultRenderingManager implements RenderingManager {

    private static final Logger log = Logger.getLogger( DefaultRenderingManager.class );

    /** The capacity of the caches; if you want something else, tweak ehcache.xml. */
    private static final int    DEFAULT_CACHESIZE     = 1_000;
    private static final String VERSION_DELIMITER     = "::";

    /** The name of the default markup parser. */
    private static final String DEFAULT_PARSER = JSPWikiMarkupParser.class.getName();
    /** The name of the default renderer. */
    private static final String DEFAULT_RENDERER = XHTMLRenderer.class.getName();
    /** The name of the default WYSIWYG renderer. */
    private static final String DEFAULT_WYSIWYG_RENDERER = WysiwygEditingRenderer.class.getName();

    private Engine m_engine;

    private boolean m_useCache = true;
    private final CacheManager m_cacheManager = CacheManager.getInstance();
    private final int m_cacheExpiryPeriod = 24*60*60; // 24 hours, in seconds; this can be relatively long

    /** If true, all titles will be beautified. */
    private boolean m_beautifyTitle = false;

    /** Stores the WikiDocuments that have been cached. */
    private Cache m_documentCache;

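    // ( Context, WikiDocument ) constructors of the configured renderer implementations, resolved in initialize().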
    private Constructor< ? > m_rendererConstructor;
    private Constructor< ? > m_rendererWysiwygConstructor;
    private String m_markupParserClass = DEFAULT_PARSER;

    /**
     *  {@inheritDoc}
     *
     *  Checks the caching settings and initializes the document cache. Looks for alternative WikiRenderer
     *  implementations and initializes them, falling back to the default XHTMLRenderer.
     */
    @Override
    public void initialize( final Engine engine, final Properties properties ) throws WikiException {
        m_engine = engine;
        m_markupParserClass = properties.getProperty( PROP_PARSER, DEFAULT_PARSER );
        if( !ClassUtil.assignable( m_markupParserClass, MarkupParser.class.getName() ) ) {
            log.warn( m_markupParserClass + " does not subclass " + MarkupParser.class.getName() + "; reverting to the default markup parser." );
            m_markupParserClass = DEFAULT_PARSER;
        }
        log.info( "Using " + m_markupParserClass + " as markup parser." );

        m_beautifyTitle  = TextUtil.getBooleanProperty( properties, PROP_BEAUTIFYTITLE, m_beautifyTitle );
        m_useCache = "true".equals( properties.getProperty( PageManager.PROP_USECACHE ) );

        if( m_useCache ) {
            final String documentCacheName = engine.getApplicationName() + "." + DOCUMENTCACHE_NAME;
            if( m_cacheManager.cacheExists( documentCacheName ) ) {
                m_documentCache = m_cacheManager.getCache( documentCacheName );
            } else {
                log.info( "cache with name " + documentCacheName + " not found in ehcache.xml, creating it with defaults." );
                m_documentCache = new Cache( documentCacheName, DEFAULT_CACHESIZE, false, false, m_cacheExpiryPeriod, m_cacheExpiryPeriod );
                m_cacheManager.addCache( m_documentCache );
            }
        }

        final String renderImplName = properties.getProperty( PROP_RENDERER, DEFAULT_RENDERER );
        final String renderWysiwygImplName = properties.getProperty( PROP_WYSIWYG_RENDERER, DEFAULT_WYSIWYG_RENDERER );

        final Class< ? >[] rendererParams = { Context.class, WikiDocument.class };
        m_rendererConstructor = initRenderer( renderImplName, rendererParams );
        m_rendererWysiwygConstructor = initRenderer( renderWysiwygImplName, rendererParams );

        log.info( "Rendering content with " + renderImplName + "." );

        WikiEventManager.getInstance().addWikiEventListener( m_engine.getManager( FilterManager.class ), this );
    }

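    /**
     *  Resolves the given renderer class and returns its {@code ( Context, WikiDocument )} constructor, throwing a
     *  {@link WikiException} if the class or the constructor cannot be found.
     */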
    private Constructor< ? > initRenderer( final String renderImplName, final Class< ? >[] rendererParams ) throws WikiException {
        Constructor< ? > c = null;
        try {
            final Class< ? > clazz = Class.forName( renderImplName );
            c = clazz.getConstructor( rendererParams );
        } catch( final ClassNotFoundException e ) {
            log.error( "Unable to find WikiRenderer implementation " + renderImplName );
        } catch( final SecurityException e ) {
            log.error( "Unable to access the WikiRenderer(WikiContext,WikiDocument) constructor for " + renderImplName );
        } catch( final NoSuchMethodException e ) {
            log.error( "Unable to locate the WikiRenderer(WikiContext,WikiDocument) constructor for " + renderImplName );
        }
        if( c == null ) {
            throw new WikiException( "Failed to get WikiRenderer '" + renderImplName + "'." );
        }
        return c;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String beautifyTitle( final String title ) {
        if( m_beautifyTitle ) {
            try {
                final Attachment att = m_engine.getManager( AttachmentManager.class ).getAttachmentInfo( title );
                if( att == null ) {
                    return TextUtil.beautifyString( title );
                }

                final String parent = TextUtil.beautifyString( att.getParentName() );
                return parent + "/" + att.getFileName();
            } catch( final ProviderException e ) {
                return title;
            }
        }

        return title;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String beautifyTitleNoBreak( final String title ) {
        if( m_beautifyTitle ) {
            return TextUtil.beautifyString( title, "&nbsp;" );
        }

        return title;
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public MarkupParser getParser( final Context context, final String pagedata ) {
        try {
            return ClassUtil.getMappedObject( m_markupParserClass, context, new StringReader( pagedata ) );
        } catch( final ReflectiveOperationException | IllegalArgumentException e ) {
            log.error( "unable to get an instance of " + m_markupParserClass + " (" + e.getMessage() + "), returning default markup parser.", e );
            return new JSPWikiMarkupParser( context, new StringReader( pagedata ) );
        }
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    // FIXME: The cache management policy is not very good: deleted/changed pages should be detected better.
    public WikiDocument getRenderedDocument( final Context context, final String pagedata ) {
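        //  The cache key has the form "<pageName>::<pageVersion>::<VAR_EXECUTE_PLUGINS value>"; the flushes done in
        //  actionPerformed() below rely on this exact layout.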
        final String pageid = context.getRealPage().getName() + VERSION_DELIMITER +
                              context.getRealPage().getVersion() + VERSION_DELIMITER +
                              context.getVariable( Context.VAR_EXECUTE_PLUGINS );

        if( useCache( context ) ) {
            final Element element = m_documentCache.get( pageid );
            if( element != null ) {
                final WikiDocument doc = ( WikiDocument )element.getObjectValue();

                //
                //  This check is needed in case the different filters have actually changed the page data.
                //  FIXME: Figure out a faster method
                //
                if( pagedata.equals( doc.getPageData() ) ) {
                    if( log.isDebugEnabled() ) {
                        log.debug( "Using cached HTML for page " + pageid );
                    }
                    return doc;
                }
            } else if( log.isDebugEnabled() ) {
                log.debug( "Re-rendering and storing " + pageid );
            }
        }

        //  Refresh the data content
        try {
            final MarkupParser parser = getParser( context, pagedata );
            final WikiDocument doc = parser.parse();
            doc.setPageData( pagedata );
            if( useCache( context ) ) {
                m_documentCache.put( new Element( pageid, doc ) );
            }
            return doc;
        } catch( final IOException ex ) {
            log.error( "Unable to parse", ex );
        }

        return null;
    }

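    /**
     *  Returns {@code true} if the rendering cache should be consulted: caching must be enabled and the request must
     *  be a plain page view; all other request contexts re-render the page.
     */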
    boolean useCache( final Context context ) {
        return m_useCache && ContextEnum.PAGE_VIEW.getRequestContext().equals( context.getRequestContext() );
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public String getHTML( final Context context, final WikiDocument doc ) throws IOException {
        final Boolean wysiwygVariable = context.getVariable( Context.VAR_WYSIWYG_EDITOR_MODE );
        final boolean wysiwygEditorMode = wysiwygVariable != null && wysiwygVariable;
        final WikiRenderer rend;
        if( wysiwygEditorMode ) {
            rend = getWysiwygRenderer( context, doc );
        } else {
            rend = getRenderer( context, doc );
        }

        return rend.getString();
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public String getHTML( final Context context, final Page page ) {
        final String pagedata = m_engine.getManager( PageManager.class ).getPureText( page.getName(), page.getVersion() );
        return textToHTML( context, pagedata );
    }

    /**
     *  Returns the converted HTML of the page's specific version. The version must be a positive integer, otherwise the current
     *  version is returned.
     *
     *  @param pagename WikiName of the page to convert.
     *  @param version Version number to fetch
     *  @return HTML-rendered page text.
     */
    @Override
    public String getHTML( final String pagename, final int version ) {
        final Page page = m_engine.getManager( PageManager.class ).getPage( pagename, version );
        final Context context = Wiki.context().create( m_engine, page );
        context.setRequestContext( ContextEnum.PAGE_NONE.getRequestContext() );
        return getHTML( context, page );
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public String textToHTML( final Context context, String pagedata ) {
        String result = "";

        final boolean runFilters = "true".equals( m_engine.getManager( VariableManager.class ).getValue( context, VariableManager.VAR_RUNFILTERS, "true" ) );

        final StopWatch sw = new StopWatch();
        sw.start();
        try {
            if( runFilters ) {
                pagedata = m_engine.getManager( FilterManager.class ).doPreTranslateFiltering( context, pagedata );
            }

            result = getHTML( context, pagedata );

            if( runFilters ) {
                result = m_engine.getManager( FilterManager.class ).doPostTranslateFiltering( context, result );
            }
        } catch( final FilterException e ) {
            log.error( "page filter threw exception: ", e );
            // FIXME: Don't yet know what to do
        }
        sw.stop();
        if( log.isDebugEnabled() ) {
            log.debug( "Page " + context.getRealPage().getName() + " rendered, took " + sw );
        }

        return result;
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public String textToHTML( final Context context,
                              String pagedata,
                              final StringTransmutator localLinkHook,
                              final StringTransmutator extLinkHook,
                              final StringTransmutator attLinkHook,
                              final boolean parseAccessRules,
                              final boolean justParse ) {
        String result = "";

        if( pagedata == null ) {
            log.error( "NULL pagedata to textToHTML()" );
            return null;
        }

        final boolean runFilters = "true".equals( m_engine.getManager( VariableManager.class ).getValue( context, VariableManager.VAR_RUNFILTERS, "true" ) );

        try {
            final StopWatch sw = new StopWatch();
            sw.start();

            if( runFilters && m_engine.getManager( FilterManager.class ) != null ) {
                pagedata = m_engine.getManager( FilterManager.class ).doPreTranslateFiltering( context, pagedata );
            }

            final MarkupParser mp = getParser( context, pagedata );
            mp.addLocalLinkHook( localLinkHook );
            mp.addExternalLinkHook( extLinkHook );
            mp.addAttachmentLinkHook( attLinkHook );

            if( !parseAccessRules ) {
                mp.disableAccessRules();
            }

            final WikiDocument doc = mp.parse();

            //  In some cases it's better just to parse, not to render
            if( !justParse ) {
                result = getHTML( context, doc );

                if( runFilters && m_engine.getManager( FilterManager.class ) != null ) {
                    result = m_engine.getManager( FilterManager.class ).doPostTranslateFiltering( context, result );
                }
            }

            sw.stop();

            if( log.isDebugEnabled() ) {
                log.debug( "Page " + context.getRealPage().getName() + " rendered, took " + sw );
            }
        } catch( final IOException e ) {
            log.error( "Failed to scan page data: ", e );
        } catch( final FilterException e ) {
            log.error( "page filter threw exception: ", e );
            // FIXME: Don't yet know what to do
        }

        return result;
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public WikiRenderer getRenderer( final Context context, final WikiDocument doc ) {
        final Object[] params = { context, doc };
        return getRenderer( params, m_rendererConstructor );
    }

    /**
     *  {@inheritDoc}
     */
    @Override
    public WikiRenderer getWysiwygRenderer( final Context context, final WikiDocument doc ) {
        final Object[] params = { context, doc };
        return getRenderer( params, m_rendererWysiwygConstructor );
    }

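    /**
     *  Instantiates a renderer through the given constructor, returning {@code null} (after logging the error) if
     *  instantiation fails.
     */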
    @SuppressWarnings("unchecked")
    private < T extends WikiRenderer > T getRenderer( final Object[] params, final Constructor< ? > rendererConstructor ) {
        try {
            return ( T )rendererConstructor.newInstance( params );
        } catch( final Exception e ) {
            log.error( "Unable to create WikiRenderer", e );
        }
        return null;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Flushes the document cache in response to a POST_SAVE_BEGIN event.
     *
     * @see WikiEventListener#actionPerformed(WikiEvent)
     */
    @Override
    public void actionPerformed( final WikiEvent event ) {
        log.debug( "event received: " + event.toString() );
        if( m_useCache ) {
            if( ( event instanceof WikiPageEvent ) && ( event.getType() == WikiPageEvent.POST_SAVE_BEGIN ) ) {
                if( m_documentCache != null ) {
                    final String pageName = ( ( WikiPageEvent ) event ).getPageName();
                    m_documentCache.remove( pageName );
                    final Collection< String > referringPages = m_engine.getManager( ReferenceManager.class ).findReferrers( pageName );

                    //
                    //  Also flush those pages that refer to this page (if a nonexistent page
                    //  appears, we need to flush the HTML that refers to the now-existing page)
                    //
                    if( referringPages != null ) {
                        for( final String page : referringPages ) {
                            if( log.isDebugEnabled() ) {
                                log.debug( "Flushing latest version of " + page );
                            }
                            // as there is a new version of the page, expire both the plugin and the pluginless versions of the old page
                            m_documentCache.remove( page + VERSION_DELIMITER + PageProvider.LATEST_VERSION + VERSION_DELIMITER + Boolean.FALSE );
                            m_documentCache.remove( page + VERSION_DELIMITER + PageProvider.LATEST_VERSION + VERSION_DELIMITER + Boolean.TRUE );
                            m_documentCache.remove( page + VERSION_DELIMITER + PageProvider.LATEST_VERSION + VERSION_DELIMITER + null );
                        }
                    }
                }
            }
        }
    }

}