/*
    Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership.  The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing,
    software distributed under the License is distributed on an
    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    KIND, either express or implied.  See the License for the
    specific language governing permissions and limitations
    under the License.
 */
package org.apache.wiki.render;

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.wiki.StringTransmutator;
import org.apache.wiki.api.core.Attachment;
import org.apache.wiki.api.core.Context;
import org.apache.wiki.api.core.ContextEnum;
import org.apache.wiki.api.core.Engine;
import org.apache.wiki.api.core.Page;
import org.apache.wiki.api.exceptions.FilterException;
import org.apache.wiki.api.exceptions.ProviderException;
import org.apache.wiki.api.exceptions.WikiException;
import org.apache.wiki.api.providers.PageProvider;
import org.apache.wiki.api.spi.Wiki;
import org.apache.wiki.attachment.AttachmentManager;
import org.apache.wiki.event.WikiEvent;
import org.apache.wiki.event.WikiEventListener;
import org.apache.wiki.event.WikiEventManager;
import org.apache.wiki.event.WikiPageEvent;
import org.apache.wiki.filters.FilterManager;
import org.apache.wiki.pages.PageManager;
import org.apache.wiki.parser.JSPWikiMarkupParser;
import org.apache.wiki.parser.MarkupParser;
import org.apache.wiki.parser.WikiDocument;
import org.apache.wiki.references.ReferenceManager;
import org.apache.wiki.util.ClassUtil;
import org.apache.wiki.util.TextUtil;
import org.apache.wiki.variables.VariableManager;

import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.Constructor;
import java.util.Collection;
import java.util.Properties;


/**
 * This class provides a facade towards the differing rendering routines. You should use the routines in this manager
 * instead of the ones in Engine, if you don't want the different side effects to occur - such as WikiFilters.
 * <p>
 * This class also manages a rendering cache, i.e. documents are stored between calls. You may control the cache by
 * tweaking the ehcache.xml file.
 * <p>
 *
 * @since 2.4
 */
public class DefaultRenderingManager implements RenderingManager {

    private static final Logger log = LogManager.getLogger( DefaultRenderingManager.class );

    /** The capacity of the caches, if you want something else, tweak ehcache.xml. */
    private static final int DEFAULT_CACHESIZE = 1_000;

    /** Separator used to compose document-cache keys out of page name, version and plugin-execution flag. */
    private static final String VERSION_DELIMITER = "::";

    /** The name of the default markup parser. */
    private static final String DEFAULT_PARSER = JSPWikiMarkupParser.class.getName();
    /** The name of the default renderer. */
    private static final String DEFAULT_RENDERER = XHTMLRenderer.class.getName();
    /** The name of the default WYSIWYG renderer. */
    private static final String DEFAULT_WYSIWYG_RENDERER = WysiwygEditingRenderer.class.getName();

    private Engine m_engine;

    private boolean m_useCache = true;
    private final CacheManager m_cacheManager = CacheManager.getInstance();
    private final int m_cacheExpiryPeriod = 24 * 60 * 60; // This can be relatively long

    /** If true, all titles will be cleaned. */
    private boolean m_beautifyTitle;

    /** Stores the WikiDocuments that have been cached. */
    private Cache m_documentCache;

    /** Constructor of the configured {@link WikiRenderer}, taking ( Context, WikiDocument ). */
    private Constructor< ? > m_rendererConstructor;
    /** Constructor of the configured WYSIWYG {@link WikiRenderer}, taking ( Context, WikiDocument ). */
    private Constructor< ? > m_rendererWysiwygConstructor;
    private String m_markupParserClass = DEFAULT_PARSER;

    /**
     * {@inheritDoc}
     *
     * Checks for cache size settings, initializes the document cache. Looks for alternative WikiRenderers, initializes one, or the
     * default XHTMLRenderer, for use.
     */
    @Override
    public void initialize( final Engine engine, final Properties properties ) throws WikiException {
        m_engine = engine;
        m_markupParserClass = properties.getProperty( PROP_PARSER, DEFAULT_PARSER );
        // Fall back to the default parser if the configured class is not actually a MarkupParser.
        if( !ClassUtil.assignable( m_markupParserClass, MarkupParser.class.getName() ) ) {
            log.warn( "{} does not subclass {} reverting to default markup parser.", m_markupParserClass, MarkupParser.class.getName() );
            m_markupParserClass = DEFAULT_PARSER;
        }
        log.info( "Using {} as markup parser.", m_markupParserClass );

        m_beautifyTitle = TextUtil.getBooleanProperty( properties, PROP_BEAUTIFYTITLE, m_beautifyTitle );
        m_useCache = "true".equals( properties.getProperty( PageManager.PROP_USECACHE ) );

        if( m_useCache ) {
            // Reuse a cache declared in ehcache.xml if present; otherwise create one with our defaults.
            final String documentCacheName = engine.getApplicationName() + "." + DOCUMENTCACHE_NAME;
            if( m_cacheManager.cacheExists( documentCacheName ) ) {
                m_documentCache = m_cacheManager.getCache( documentCacheName );
            } else {
                log.info( "cache with name {} not found in ehcache.xml, creating it with defaults.", documentCacheName );
                m_documentCache = new Cache( documentCacheName, DEFAULT_CACHESIZE, false, false, m_cacheExpiryPeriod, m_cacheExpiryPeriod );
                m_cacheManager.addCache( m_documentCache );
            }
        }

        final String renderImplName = properties.getProperty( PROP_RENDERER, DEFAULT_RENDERER );
        final String renderWysiwygImplName = properties.getProperty( PROP_WYSIWYG_RENDERER, DEFAULT_WYSIWYG_RENDERER );

        final Class< ? >[] rendererParams = { Context.class, WikiDocument.class };
        m_rendererConstructor = initRenderer( renderImplName, rendererParams );
        m_rendererWysiwygConstructor = initRenderer( renderWysiwygImplName, rendererParams );

        log.info( "Rendering content with {}.", renderImplName );

        // Listen for page-save events so we can flush stale cached documents.
        WikiEventManager.getInstance().addWikiEventListener( m_engine.getManager( FilterManager.class ), this );
    }

    /**
     * Looks up the ( Context, WikiDocument ) constructor of the given {@link WikiRenderer} implementation.
     *
     * @param renderImplName fully qualified class name of the renderer.
     * @param rendererParams expected constructor parameter types.
     * @return the resolved constructor, never {@code null}.
     * @throws WikiException if the class or the expected constructor cannot be found.
     */
    private Constructor< ? > initRenderer( final String renderImplName, final Class< ? >[] rendererParams ) throws WikiException {
        Constructor< ? > c = null;
        try {
            final Class< ? > clazz = Class.forName( renderImplName );
            c = clazz.getConstructor( rendererParams );
        } catch( final ClassNotFoundException e ) {
            // Log the exception too, so the stack trace isn't silently discarded.
            log.error( "Unable to find WikiRenderer implementation {}", renderImplName, e );
        } catch( final SecurityException e ) {
            log.error( "Unable to access the WikiRenderer(WikiContext,WikiDocument) constructor for {}", renderImplName, e );
        } catch( final NoSuchMethodException e ) {
            log.error( "Unable to locate the WikiRenderer(WikiContext,WikiDocument) constructor for {}", renderImplName, e );
        }
        if( c == null ) {
            throw new WikiException( "Failed to get WikiRenderer '" + renderImplName + "'." );
        }
        return c;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String beautifyTitle( final String title ) {
        if( m_beautifyTitle ) {
            try {
                final Attachment att = m_engine.getManager( AttachmentManager.class ).getAttachmentInfo( title );
                if( att == null ) {
                    return TextUtil.beautifyString( title );
                }

                // Attachments: beautify only the parent page name, keep the file name verbatim.
                final String parent = TextUtil.beautifyString( att.getParentName() );
                return parent + "/" + att.getFileName();
            } catch( final ProviderException e ) {
                // Best effort: if we can't look up the attachment, show the raw title.
                return title;
            }
        }

        return title;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String beautifyTitleNoBreak( final String title ) {
        if( m_beautifyTitle ) {
            return TextUtil.beautifyString( title, "&nbsp;" );
        }

        return title;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public MarkupParser getParser( final Context context, final String pagedata ) {
        try {
            return ClassUtil.getMappedObject( m_markupParserClass, context, new StringReader( pagedata ) );
        } catch( final ReflectiveOperationException | IllegalArgumentException e ) {
            log.error( "unable to get an instance of {} ({}), returning default markup parser.", m_markupParserClass, e.getMessage(), e );
            return new JSPWikiMarkupParser( context, new StringReader( pagedata ) );
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    // FIXME: The cache management policy is not very good: deleted/changed pages should be detected better.
    public WikiDocument getRenderedDocument( final Context context, final String pagedata ) {
        // Cache key: page name + version + whether plugins were executed during parsing.
        final String pageid = context.getRealPage().getName() + VERSION_DELIMITER +
                              context.getRealPage().getVersion() + VERSION_DELIMITER +
                              context.getVariable( Context.VAR_EXECUTE_PLUGINS );

        if( useCache( context ) ) {
            final Element element = m_documentCache.get( pageid );
            if( element != null ) {
                final WikiDocument doc = ( WikiDocument )element.getObjectValue();

                //
                //  This check is needed in case the different filters have actually changed the page data.
                //  FIXME: Figure out a faster method
                if( pagedata.equals( doc.getPageData() ) ) {
                    log.debug( "Using cached HTML for page {}", pageid );
                    return doc;
                }
            } else {
                log.debug( "Re-rendering and storing {}", pageid );
            }
        }

        //  Refresh the data content
        try {
            final MarkupParser parser = getParser( context, pagedata );
            final WikiDocument doc = parser.parse();
            doc.setPageData( pagedata );
            if( useCache( context ) ) {
                m_documentCache.put( new Element( pageid, doc ) );
            }
            return doc;
        } catch( final IOException ex ) {
            log.error( "Unable to parse", ex );
        }

        return null;
    }

    /**
     * Returns {@code true} if the document cache should be consulted for this request: caching must be enabled
     * and the request must be a plain page view.
     *
     * @param context current wiki context.
     * @return {@code true} if the cache applies to this request.
     */
    boolean useCache( final Context context ) {
        return m_useCache && ContextEnum.PAGE_VIEW.getRequestContext().equals( context.getRequestContext() );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getHTML( final Context context, final WikiDocument doc ) throws IOException {
        // VAR_WYSIWYG_EDITOR_MODE may be unset; treat null as "not in WYSIWYG mode".
        final Boolean wysiwygVariable = context.getVariable( Context.VAR_WYSIWYG_EDITOR_MODE );
        final boolean wysiwygEditorMode = wysiwygVariable != null && wysiwygVariable;
        final WikiRenderer rend = wysiwygEditorMode ? getWysiwygRenderer( context, doc ) : getRenderer( context, doc );

        return rend.getString();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getHTML( final Context context, final Page page ) {
        final String pagedata = m_engine.getManager( PageManager.class ).getPureText( page.getName(), page.getVersion() );
        return textToHTML( context, pagedata );
    }

    /**
     *  Returns the converted HTML of the page's specific version. The version must be a positive integer, otherwise the current
     *  version is returned.
     *
     *  @param pagename WikiName of the page to convert.
     *  @param version Version number to fetch
     *  @return HTML-rendered page text.
     */
    @Override
    public String getHTML( final String pagename, final int version ) {
        final Page page = m_engine.getManager( PageManager.class ).getPage( pagename, version );
        final Context context = Wiki.context().create( m_engine, page );
        context.setRequestContext( ContextEnum.PAGE_NONE.getRequestContext() );
        return getHTML( context, page );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String textToHTML( final Context context, String pagedata ) {
        String result = "";

        final boolean runFilters = "true".equals( m_engine.getManager( VariableManager.class ).getValue( context, VariableManager.VAR_RUNFILTERS, "true" ) );

        final StopWatch sw = new StopWatch();
        sw.start();
        try {
            if( runFilters ) {
                pagedata = m_engine.getManager( FilterManager.class ).doPreTranslateFiltering( context, pagedata );
            }

            result = getHTML( context, pagedata );

            if( runFilters ) {
                result = m_engine.getManager( FilterManager.class ).doPostTranslateFiltering( context, result );
            }
        } catch( final FilterException e ) {
            log.error( "page filter threw exception: ", e );
            // FIXME: Don't yet know what to do
        }
        sw.stop();
        log.debug( "Page {} rendered, took {}", context.getRealPage().getName(), sw );

        return result;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String textToHTML( final Context context,
                              String pagedata,
                              final StringTransmutator localLinkHook,
                              final StringTransmutator extLinkHook,
                              final StringTransmutator attLinkHook,
                              final boolean parseAccessRules,
                              final boolean justParse ) {
        String result = "";

        if( pagedata == null ) {
            log.error("NULL pagedata to textToHTML()");
            return null;
        }

        final boolean runFilters = "true".equals( m_engine.getManager( VariableManager.class ).getValue( context, VariableManager.VAR_RUNFILTERS, "true" ) );

        try {
            final StopWatch sw = new StopWatch();
            sw.start();

            if( runFilters && m_engine.getManager( FilterManager.class ) != null ) {
                pagedata = m_engine.getManager( FilterManager.class ).doPreTranslateFiltering( context, pagedata );
            }

            final MarkupParser mp = getParser( context, pagedata );
            mp.addLocalLinkHook( localLinkHook );
            mp.addExternalLinkHook( extLinkHook );
            mp.addAttachmentLinkHook( attLinkHook );

            if( !parseAccessRules ) {
                mp.disableAccessRules();
            }

            final WikiDocument doc = mp.parse();

            //  In some cases it's better just to parse, not to render
            if( !justParse ) {
                result = getHTML( context, doc );

                if( runFilters && m_engine.getManager( FilterManager.class ) != null ) {
                    result = m_engine.getManager( FilterManager.class ).doPostTranslateFiltering( context, result );
                }
            }

            sw.stop();

            log.debug( "Page {} rendered, took {}", context.getRealPage().getName(), sw );
        } catch( final IOException e ) {
            log.error( "Failed to scan page data: ", e );
        } catch( final FilterException e ) {
            log.error( "page filter threw exception: ", e );
            // FIXME: Don't yet know what to do
        }

        return result;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WikiRenderer getRenderer( final Context context, final WikiDocument doc ) {
        final Object[] params = { context, doc };
        return getRenderer( params, m_rendererConstructor );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WikiRenderer getWysiwygRenderer( final Context context, final WikiDocument doc ) {
        final Object[] params = { context, doc };
        return getRenderer( params, m_rendererWysiwygConstructor );
    }

    /**
     * Instantiates a renderer through the given constructor.
     *
     * @param params constructor arguments ( Context, WikiDocument ).
     * @param rendererConstructor constructor resolved by {@link #initRenderer(String, Class[])}.
     * @return a new renderer instance, or {@code null} if instantiation failed.
     */
    @SuppressWarnings("unchecked")
    private < T extends WikiRenderer > T getRenderer( final Object[] params, final Constructor< ? > rendererConstructor ) {
        try {
            return ( T )rendererConstructor.newInstance( params );
        } catch( final Exception e ) {
            log.error( "Unable to create WikiRenderer", e );
        }
        return null;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Flushes the document cache in response to a POST_SAVE_BEGIN event.
     *
     * @see WikiEventListener#actionPerformed(WikiEvent)
     */
    @Override
    public void actionPerformed( final WikiEvent event ) {
        log.debug( "event received: {}", event );
        if( m_useCache ) {
            if( ( event instanceof WikiPageEvent ) && ( event.getType() == WikiPageEvent.POST_SAVE_BEGIN ) ) {
                if( m_documentCache != null ) {
                    final String pageName = ( ( WikiPageEvent ) event ).getPageName();
                    m_documentCache.remove( pageName );
                    final Collection< String > referringPages = m_engine.getManager( ReferenceManager.class ).findReferrers( pageName );

                    //
                    //  Flush also those pages that refer to this page (if an nonexistent page
                    //  appears, we need to flush the HTML that refers to the now-existent page)
                    //
                    if( referringPages != null ) {
                        for( final String page : referringPages ) {
                            log.debug( "Flushing latest version of {}", page );
                            // as there is a new version of the page expire both plugin and pluginless versions of the old page
                            m_documentCache.remove( page + VERSION_DELIMITER + PageProvider.LATEST_VERSION + VERSION_DELIMITER + Boolean.FALSE );
                            m_documentCache.remove( page + VERSION_DELIMITER + PageProvider.LATEST_VERSION + VERSION_DELIMITER + Boolean.TRUE );
                            m_documentCache.remove( page + VERSION_DELIMITER + PageProvider.LATEST_VERSION + VERSION_DELIMITER + null );
                        }
                    }
                }
            }
        }
    }

}