Logo Search packages:      
Sourcecode: zope-cachefu version File versions  Download package


00001 """

$Id: $

__authors__ = 'Geoff Davis <geoff@geoffdavis.net>'
__docformat__ = 'restructuredtext'

import sets
import urlparse

from AccessControl import ClassSecurityInfo
from AccessControl.PermissionRole import rolesForPermissionOn
from BTrees import Length
from DateTime import DateTime
from ZODB.POSException import ConflictError
from Products.PageTemplates.Expressions import getEngine, SecureModuleImporter

from Products.Archetypes import public as atapi
from Products.Archetypes.debug import log_exc
try:
    from Products.CMFCore import permissions
except ImportError:
    # CMF < 1.5 spelled the permissions module differently
    from Products.CMFCore import CMFCorePermissions as permissions
from Products.CMFCore.utils import UniqueObject, getToolByName
from Products.CacheSetup import config

# Archetypes schema for the CacheTool.
# NOTE(review): this schema definition was damaged in extraction -- the
# atapi.*Field(...) declarations (field names, field types, defaults,
# accessors) are missing and the parenthesization below is unbalanced; only
# widget/vocabulary/write_permission keyword arguments survived.  Recover the
# full field definitions from upstream CacheFu before using this file.
schema = atapi.BaseSchema + atapi.Schema((
                      widget=atapi.SelectionWidget(label='Cache Configuration',
                                                   description='Please indicate how your cache(s) are configured.'),
                      vocabulary=atapi.DisplayList((('zserver','Zope only'), ('apache','Zope behind Apache'), \
                                                    ('squid','Zope behind Squid'),('squid_behind_apache','Zope behind Squid behind Apache'))),
                      write_permission = permissions.ManagePortal,
                     widget=atapi.LinesWidget(label='Site Domains',
                                              description='Enter a list of domains for your site.  If your site handles both http://www.mysite.com:80 and http://mysite.com:80,  be sure to include both. Also include https versions of your domains if you use them.  Be sure to include a port for each site.'),
                     write_permission = permissions.ManagePortal,
                     widget=atapi.LinesWidget(label='Squid URLs',
                                              description='URLs for squid proxy caches for your site.  You only need this if you are running squid behind apache.  In the most common setup, there will be a single squid instance at'),
                     write_permission = permissions.ManagePortal,
                                                   description='Should Zope compress pages before serving them, and if so, what criteria should be used to determine whether pages should be gzipped?  If you choose to compress content, Recommended: either "Never" or "Use Accept-Encoding header".',),
                      vocabulary=atapi.DisplayList((('never','Never'),('always','Always'),('accept-encoding','Use Accept-Encoding header'),('accept-encoding+user-agent','Use Accept-Encoding and User-Agent headers'))),
                      write_permission = permissions.ManagePortal,
                       default='Accept-Encoding, Accept-Language',
    #                   accessor='getVaryHeader',
    #                   edit_accessor='getVaryHeader',
    #                   mutator='setVaryHeader',
                       widget=atapi.StringWidget(label='Vary Header',
                                                 description='Value for the Vary header.  If you are using gzipping, you may need to include "Accept-Encoding" and possibly "User-Agent".  If you are running a multi-lingual site, you will need "Accept-Language".  Values should be separated by commas.',
                       write_permission = permissions.ManagePortal,
                       widget=atapi.BooleanWidget(label='Enable Macro Caching',
                                                  description='Enable caching of METAL macros.  Macros registered in the "Cached Macros" tab will be cached.  This feature is still experimental.',),
                       write_permission = permissions.ManagePortal,

# Ids of the tool's three sub-folders and the portal types each may contain
# (used by the lazy factories getRules/getHeaderSets/getMacros below).
RULES_ID = 'rules'
RULE_TYPES = ('ContentCacheRule','TemplateCacheRule','PolicyHTTPCacheManagerCacheRule')
HEADERSETS_ID = 'headersets'
HEADERSET_TYPES = ('HeaderSet',)
MACROS_ID = 'cached_macros'
MACRO_TYPES = ('MacroRule',)

class CacheTool(atapi.BaseFolder,UniqueObject):
    """Portal tool storing CacheFu's cache configuration (proxy setup,
    domains, squid URLs, gzip policy) plus folders of cache rules,
    header sets and cached macros."""
    archetype_name = 'Cache Configuration Tool'
    portal_type = meta_type = 'CacheTool'
    security = ClassSecurityInfo()
    schema = schema
    content_icon = 'cachesetup_tool_icon.gif'
    global_allow = 0
    # conflict-free counters used in ETag / cache-key construction;
    # lazily replaced by BTrees.Length instances (see increment* methods)
    _catalog_count = None
    _permission_count = None

    # NOTE(review): the closing braces/parens of `actions` and `aliases` were
    # lost in extraction and have been restored here -- confirm against upstream.
    actions = (
        {'action':      'string:$object_url/cache_setup_config',
         'category':    'object',
         'id':          'view',
         'name':        'Cache Setup',
         'permissions': (permissions.ManagePortal,),
         'visible':     False},
        )

    aliases = {
        '(Default)':    'cache_setup_config',
        'view' :        'cache_setup_config',
        'edit' :        'base_edit'
        }

    def initializeArchetype(self, **kwargs):
        """Initialize the tool, preserving squid URLs already configured
        in portal_squid across (re)initialization."""
        # get values from other places before archetypes initializes them
        squid_urls = self._getSquidUrls()
        atapi.BaseFolder.initializeArchetype(self, **kwargs)
        # don't stomp on squid urls
        # NOTE(review): this restore call was reconstructed from the comment
        # above and the otherwise-unused `squid_urls` capture -- confirm
        # against upstream CacheFu.
        self._setSquidUrls(squid_urls)

    def at_post_edit_script(self):
        # tool should not appear in portal_catalog
        # NOTE(review): the method body was lost in extraction; unindexObject()
        # matches the comment's stated intent (CMFCatalogAware API) -- confirm
        # against upstream CacheFu.
        self.unindexObject()

    def setCacheConfig(self, value):
        """Mutator for the cacheConfig field.  When the proxy setup changes,
        keep the default 'plone-content-types' rule's anonymous header set in
        sync (in-memory caching without a proxy, proxy caching with one)."""
        old_value = self.getCacheConfig()
        self.getField('cacheConfig').set(self, value)
        if value != old_value:
            # only update rule when value changes
            rules = self.getRules()
            rule = getattr(rules, 'plone-content-types', None)
            if rule is not None:
                # NOTE(review): the two mutation lines below were lost in
                # extraction and reconstructed symmetrically from the header
                # set ids being tested -- confirm against upstream CacheFu.
                if value in ['squid', 'squid_behind_apache']:
                    # update the rule if it has not been changed already
                    if rule.getHeaderSetIdAnon() == 'cache-in-memory':
                        rule.setHeaderSetIdAnon('cache-in-proxy-1-hour')
                elif value in ['zserver', 'apache']:
                    # update the rule if it has not been changed already
                    if rule.getHeaderSetIdAnon() == 'cache-in-proxy-1-hour':
                        rule.setHeaderSetIdAnon('cache-in-memory')

    def getRules(self):
        """Return the tool's RuleFolder, creating it on first access."""
        rules = getattr(self, RULES_ID, None)
        if rules is None:
            # temporarily permit the folder type so invokeFactory succeeds;
            # was ('RuleFolder') -- a plain string, not the intended 1-tuple
            self.allowed_content_types = ('RuleFolder',)
            self.invokeFactory(id=RULES_ID, type_name='RuleFolder')
            self.allowed_content_types = ()
            rules = self.getRules()
        return rules

    def getMacros(self):
        """Return the tool's MacroFolder, creating it on first access."""
        macros = getattr(self, MACROS_ID, None)
        if macros is None:
            # temporarily permit the folder type so invokeFactory succeeds;
            # was ('MacroFolder') -- a plain string, not the intended 1-tuple
            self.allowed_content_types = ('MacroFolder',)
            self.invokeFactory(id=MACROS_ID, type_name='MacroFolder')
            self.allowed_content_types = ()
            macros = self.getMacros()
        return macros

    def getHeaderSets(self):
        """Return the tool's HeaderSetFolder, creating it on first access."""
        header_sets = getattr(self, HEADERSETS_ID, None)
        if header_sets is None:
            # add header set folder; temporarily permit the type so
            # invokeFactory succeeds.  Was ('HeaderSetFolder') -- a plain
            # string, not the intended 1-tuple.
            self.allowed_content_types = ('HeaderSetFolder',)
            self.invokeFactory(id=HEADERSETS_ID, type_name='HeaderSetFolder')
            self.allowed_content_types = ()
            header_sets = self.getHeaderSets()
        return header_sets

    def getHeaderSetById(self, id):
        """Return the header set child with the given id (AttributeError if
        no such child exists)."""
        folder = self.getHeaderSets()
        return getattr(folder, id)

    # ##### Counters for use in ETag/cache key building #####

    def incrementCatalogCount(self):
        """Bump the catalog counter used for cache invalidation."""
        # catalog_count is a minimal counter object that we will increment
        # every time an object is indexed/reindexed/unindexed -- we will
        # then use this for cache invalidation
        if self._catalog_count is None:
            self._catalog_count = Length.Length()
        # BTrees.Length supports conflict-free increments via change();
        # the original visible code never incremented despite the name
        self._catalog_count.change(1)

    def getCatalogCount(self):
        """Return the current catalog counter value (0 before any increment)."""
        # mirror getPermissionCount: lazily create the counter so we don't
        # raise TypeError by calling None before the first increment
        if self._catalog_count is None:
            self._catalog_count = Length.Length()
        return self._catalog_count()

    def incrementPermissionCount(self):
        """Bump the permission counter used for cache invalidation."""
        # permission_count is a minimal counter object that we will increment every
        # time the relationship between roles and permissions changes.  We will use
        # this value for cache invalidation
        if self._permission_count is None:
            self._permission_count = Length.Length()
        # conflict-free increment; the original visible code never incremented
        self._permission_count.change(1)

    def getPermissionCount(self):
        """Return the current permission counter value (0 before any increment)."""
        counter = self._permission_count
        if counter is None:
            # lazily create the counter so the first read works
            counter = self._permission_count = Length.Length()
        return counter()

    # ##### Accessors, mutators, and helper methods used in configuration ######

    def _getCompleteUrl(self, url):
        """Normalize *url* to 'protocol://host:port' (path/query stripped).

        Bare hostnames get an 'http://' scheme; hosts without an explicit
        port get the protocol default (443 for https, 80 otherwise).
        """
        if url.find('//') == -1:
            url = 'http://' + url
        p = urlparse.urlparse(url)
        protocol = p[0]
        if not protocol:
            # e.g. scheme-relative '//host' input
            protocol = 'http'
        host = p[1]
        split_host = host.split(':')
        if len(split_host) == 1:
            # no explicit port: supply the protocol default.  The original
            # visible code lost the `else:` here, so port was always '80'.
            if protocol == 'https':
                port = '443'
            else:
                port = '80'
            host = split_host[0] + ':' + port
        return urlparse.urlunparse((protocol, host, '','','',''))

    def _getSquidUrls(self):
        """Fetch the squid purge URLs from portal_squid as a tuple."""
        squid_tool = getToolByName(self, 'portal_squid')
        raw = squid_tool.getSquidURLs()
        # portal_squid stores the urls newline-separated; drop blanks
        return tuple(filter(None, raw.split('\n')))
    def _setSquidUrls(self, list_of_urls):
        # pass a \n-joined list of urls to the squid tool
        squid_tool = getToolByName(self, 'portal_squid')
        # the actual store call was missing from the visible code; this is the
        # inverse of _getSquidUrls (which splits on '\n')
        squid_tool.setSquidURLs('\n'.join(list_of_urls))
    def hasPurgeableProxy(self):
        """True when the configured setup includes a purgeable squid proxy."""
        config_value = self.getCacheConfig()
        return config_value == 'squid' or config_value == 'squid_behind_apache'

    def getDomains(self):
        """Return the domains used to build purge URLs: the 'domains' field
        when squid sits behind apache, otherwise the squid URLs themselves."""
        if self.getCacheConfig() == 'squid_behind_apache':
            return self.getField('domains').get(self)
        # was unreachable in the visible code (lost dedent/else in extraction)
        return self._getSquidUrls()

    security.declareProtected(permissions.ManagePortal, 'setDomains')
    def setDomains(self, value):
        # Mutator for the 'domains' field.  Accepts either a newline/CR
        # separated string or a sequence of domain names.
        # NOTE(review): extraction damage -- the body of the `for` loop below
        # is missing (presumably it normalized each entry, e.g. via
        # _getCompleteUrl, and appended to `domains`), and the trailing `if`
        # appears to have lost an `else:` branch (likely storing to
        # portal_squid for plain-squid setups, cf. getDomains).  As written
        # this does not parse; recover the missing lines from upstream.
        if value is None:
            value = ''
        if type(value) == type(''):
            value = value.replace('\r','\n')
            value = value.split('\n')
        value = [v.strip() for v in value if v]
        domains = []
        for v in value:
        if self.getCacheConfig() == 'squid_behind_apache':
            self.getField('domains').set(self, domains)

    security.declareProtected(permissions.View, 'getSquidURLs')
    def getSquidURLs(self):
        """Accessor: the configured squid URLs when squid runs behind apache,
        otherwise the empty string."""
        if self.getCacheConfig() == 'squid_behind_apache':
            return self._getSquidUrls()
        # was unreachable in the visible code (lost dedent/else in extraction)
        return ''

    security.declareProtected(permissions.ManagePortal, 'setSquidURLs')
    def setSquidURLs(self, value):
        """Mutator: normalize and forward squid URLs to portal_squid.
        Only applies when the cache config is squid-behind-apache."""
        if self.getCacheConfig() != 'squid_behind_apache':
            return
        if value is None:
            value = ''
        if type(value) == type(''):
            # accept CR or LF separated input
            value = value.replace('\r','\n').split('\n')
        urls = [self._getCompleteUrl(item) for item in value if item]
        self._setSquidUrls(urls)

    def post_validate(self, REQUEST, errors):
        """Cross-field validation run after per-field validation.

        Fills *errors* (field id -> message) for inconsistent combinations of
        cacheConfig, squidURLs, domains, gzip and varyHeader.
        """
        cache_config = REQUEST.get('cacheConfig',None)
        squid_urls = REQUEST.get('squidURLs',None)
        if cache_config == 'squid_behind_apache':
            if not squid_urls:
                errors['squidURLs'] = 'Please enter the URLs for your squid caches'
        else:
            # lost `else:` in the visible code made this fire exactly when
            # squid WAS behind apache -- inverted
            if squid_urls:
                errors['squidURLs'] = 'Set this field only if using squid behind apache'

        domains = REQUEST.get('domains', None)
        if cache_config in ('squid', 'squid_behind_apache'):
            if not domains:
                errors['domains'] = 'Please enter the domains that you will be caching'

        gzip = REQUEST.get('gzip',None)
        vary_header = REQUEST.get('varyHeader','')
        values = [v.strip() for v in vary_header.split(',')]
        if gzip in ('accept-encoding', 'accept-encoding+user-agent'):
            if not 'Accept-Encoding' in values:
                errors['varyHeader'] = 'When Compression is set to "%s", you need "Accept-Encoding" in the Vary header' % gzip
            if gzip == 'accept-encoding+user-agent':
                if not 'User-Agent' in values:
                    # message rephrased for grammar/consistency with the
                    # Accept-Encoding message above
                    errors['varyHeader'] = 'When Compression is set to "%s", you need "User-Agent" in the Vary header' % gzip

    #security.declareProtected(permissions.View, 'getVaryHeader')
    #def getVaryHeader(self):
    #    header_sets = self.getHeaderSets()
    #    values = header_sets.objectValues()
    #    if not values:
    #        return ''
    #    return header_sets.objectValues()[0].getVary()

    #security.declareProtected(permissions.ManagePortal, 'setVaryHeader')
    #def setVaryHeader(self, value):
    #    header_sets = self.getHeaderSets()
    #    for hs in header_sets.objectValues():
    #        hs.setVary(value)

    def validate_enableMacroCaching(self, value):
        """Field validator: only allow enabling macro caching on supported
        setups.  Returns an error message string, or None when valid."""
        if not (value and value != '0'):
            # feature is being turned off -- nothing to validate
            # (the `return` was lost in extraction; an `if` with no suite
            # does not parse)
            return
        # see if we have plone 2.0 or 2.1
        ivt = getattr(self.portal_migration, 'getInstanceVersionTuple', None)
        if ivt is None:
            # getInstanceVersionTuple does not exist -> assume Plone 2.0
            ploneVersion = (2, 0)
        else:
            # lost `else:` in the visible code always overwrote the default
            ploneVersion = ivt()
        if ploneVersion[0] != 2 or ploneVersion[1] > 1:
            return 'Macro caching is currently only available for Plone 2.0 and 2.1'
        if not config.ENABLE_MACRO_CACHE:
            return 'Macro caching is disabled.  Set ENABLE_MACRO_CACHE to True in CacheSetup/config.py and restart Zope to enable it.'

    def getEnableMacroCaching(self):
        """Accessor for the macro-caching flag; forced False whenever the
        feature is globally disabled in CacheSetup/config.py."""
        if config.ENABLE_MACRO_CACHE:
            return self.getField('enableMacroCaching').get(self)
        return False
    security.declareProtected(permissions.ManagePortal, 'manage_purgePageCache')
    def manage_purgePageCache(self, REQUEST=None):
        """Purge the page cache manager"""
        pc = getToolByName(self, config.PAGE_CACHE_MANAGER_ID)
        # NOTE(review): the actual invalidation call on `pc` was lost in
        # extraction -- as visible, `pc` is fetched and never used.  Recover
        # the purge call from upstream CacheFu before relying on this.
        if REQUEST is not None:
            url = REQUEST.get('HTTP_REFERER', self.absolute_url()+'/edit')
            msg = 'portal_status_message=Page+cache+purged'
            # lost `else:` in the visible code appended both '&' and '?'
            if url.find('?') != -1:
                url += '&' + msg
            else:
                url += '?' + msg
            return REQUEST.RESPONSE.redirect(url)

    security.declareProtected(permissions.ManagePortal, 'manage_purgeMacroCache')
    def manage_purgeMacroCache(self, REQUEST=None):
        """Purge the macro cache"""
        # NOTE(review): the statements that actually clear the macro cache
        # were lost in extraction -- recover them from upstream CacheFu before
        # relying on this; as visible only the redirect remains.
        if REQUEST is not None:
            url = REQUEST.get('HTTP_REFERER', self.absolute_url()+'/edit')
            msg = 'portal_status_message=Macro+cache+purged'
            # lost `else:` in the visible code appended both '&' and '?'
            if url.find('?') != -1:
                url += '&' + msg
            else:
                url += '?' + msg
            return REQUEST.RESPONSE.redirect(url)

    # ##### Helper methods used for building ETags and for header setting ######

00368     def canAnonymousView(self, object):
        """Returns True if anonymous users can view an object"""
        if 'Anonymous' in rolesForPermissionOn('View', object):
            return True
        # XXX i am not sure it is possible to assign local roles to the anonymous user
        # XXX if it is, there may need to be some local role tomfoolery here
        # XXX something like the following
        # roles_with_view = {}
        # for r in rolesForPermissionOn('View', obj):
        #    roles_with_view[r] = 1
        # try:
        #    all_local_roles = portal.acl_users._getAllLocalRoles(obj)
        # except AttributeError:
        #    all_local_roles = _mergedLocalRoles(obj)
        # if 'Anonymous user' in all_local_roles:
        #    for r in all_local_roles['Anonymous user']:
        #       if r in roles_with_view:
        #          return True
        return False

00389     def isGzippable(self, css=0, js=0, REQUEST=None):
        """Indicate whether gzipping is allowed for the current request.  Returns
           a tuple.  The first argument indicates whether gzipping should be enabled,
           the second indicates whether gzipping should be forced, and the third
           whether the browser will accept gzipped content."""
        # force: force http compression even if the browser doesn't send an accept
        # debug: return compression state (0: no, 1: yes, 2: force)
        # css: set this to 1 inside a css file (for later use)
        # js: set this to 1 inside a js file (for later use)

        if REQUEST is None:
            REQUEST = self.REQUEST
        use_gzip = self.getGzip()

        force = 0
        if use_gzip == 'never':
            enable_compression = 0
        elif use_gzip == 'always':
            enable_compression = 1
            force = 1
        elif use_gzip == 'accept-encoding':
            # compress everything except css and js
            enable_compression = 1
        elif use_gzip == 'accept-encoding+user-agent':
            # gzip compatibility info courtesy of
            # http://httpd.apache.org/docs/2.2/mod/mod_deflate.html
            user_agent = REQUEST.get('HTTP_USER_AGENT', '')
            if user_agent.startswith('Mozilla/4'):
                # Netscape 4.x can't handle gzipped css and js
                enable_compression = (css==0 and js==0)
            # Netscape 4.0.6-4.0.8 has some gzip-related bugs
            if user_agent[len('Mozilla/4.')] in ('6','7','8'):
                enable_compression = 0
            # Some versions of MSIE pretend to be Netscape 4.x but are OK with gzipping
            if user_agent.find('MSIE'):
                enable_compression = 1

        return (enable_compression, force, REQUEST.get('HTTP_ACCEPT_ENCODING', '').find('gzip') != -1)

    # ##### Main methods ######

    def getRuleAndHeaderSet(self, request, object, view, member):
        """Get the caching rule that applies here and the header set specified by the rule"""
        # First rule (in folder order) whose getHeaderSet() returns a value wins.
        rules = self.getRules().objectValues()
        for rule in rules:
            # NOTE(review): extraction damage -- a `try:` line wrapping the
            # two statements below is missing, and the `except ConflictError:`
            # clause has lost its body (upstream presumably re-raises the
            # ConflictError and logs/ignores other per-rule failures).  As
            # written this does not parse; recover the lines from upstream.
                header_set = rule.getHeaderSet(request, object, view, member)
                if header_set is not None:
                    return (rule, header_set)
            except ConflictError:
        # no rule matched
        return (None, None)

    def getUrlsToPurge(self, object):
        """Get a list of URLs to be purged when the given object is added / modified / deleted"""

        # if nothing to purge, return an empty list
        if not self.hasPurgeableProxy():
            return []
        # collect relative urls from every rule; sets.Set de-duplicates
        relative_urls = sets.Set()
        rules = self.getRules().objectValues()
        for rule in rules:
            # NOTE(review): extraction damage -- a `try:` line around the call
            # below is missing and the `except ConflictError:` clause has lost
            # its body; as written this does not parse.  Recover from upstream.
                rule.getRelativeUrlsToPurge(object, relative_urls)
            except ConflictError:
        relative_urls = list(relative_urls)
        if relative_urls:
            if self.getCacheConfig() == 'squid_behind_apache':
                # assumes urls passed to squid by apache take the form
                # protocol/host/port/relative-url (see prefix built below;
                # the tail of this comment was truncated in extraction)
                domains = self.getDomains()
                prefixes = []
                for d in domains:
                    p = urlparse.urlparse(d)
                    protocol = p[0]
                    host = p[1]
                    split_host = host.split(':')
                    host = split_host[0]
                    # NOTE(review): assumes every domain carries an explicit
                    # port (IndexError otherwise); setDomains/_getCompleteUrl
                    # appear intended to guarantee that -- confirm.
                    port = split_host[1]
                    prefixes.append('%s/%s/%s/' % (protocol, host, port))
                relative_urls = [prefix+url for prefix in prefixes \
                                            for url in relative_urls]
        return relative_urls

    # A few helper methods
00483     def getMember(self):
        """Utility method for getting a member for use in expression contexts.  Returns
           the Member object for the currently authenticated member or None if the
           user is not authenticated."""
        pm = getToolByName(self, 'portal_membership', None)
        # stick to the CachingPolicyManager expression convention
        if not pm or pm.isAnonymousUser():
            return None 
            return pm.getAuthenticatedMember()
    # a few methods for generating non-hideous default ids
    security.declareProtected(permissions.ManagePortal, 'generateUniqueId')
    def generateUniqueId(self, type_name):
        """Return a small numeric string not already used as an id in the
        request's parent container (nicer than Plone's generated ids)."""
        container = self.REQUEST.PARENTS[0]
        existing_ids = container.objectIds()
        candidate = len(existing_ids) + 1
        while str(candidate) in existing_ids:
            candidate += 1
        return str(candidate)

    def _isIDAutoGenerated(self, id):
            # NOTE(review): extraction damage -- the test that distinguishes
            # auto-generated ids from user-supplied ones is missing; only the
            # two return statements survived, so as written this always
            # returns True.  Recover the condition from upstream CacheFu.
            return True
            return False

Generated by  Doxygen 1.6.0   Back to index