""" 
Time getting acl from a pickle

Usage: time_cache.py count
"""

# Prefer the C implementation of pickle when available (Python 2).
try:
    import cPickle as pickle
except ImportError:
    import pickle

# Set pickle protocol, see http://docs.python.org/lib/node64.html
try:
    # Requires 2.3
    PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL
except AttributeError:
    # Use protocol 1, binary format compatible with all python versions
    PICKLE_PROTOCOL = 1
    
import base64
import dircache
import os
import random
import sha
import shutil
import sys
import time

random.seed()

# Access rights are bit flags: each right has an ON bit and an NA
# (not-available) bit so entries can be OR-ed together and each bit
# tested independently.  Every flag must therefore be a distinct
# power of two.  The original values (0x16, 0x32, 0x64, 0x128,
# 0x256, 0x512) mixed up hex and decimal notation: 0x16 is 22, not
# 16, and those values overlap the READ/WRITE bits.
READ_NA = 0x1
READ_ON = 0x2
WRITE_NA = 0x4
WRITE_ON = 0x8
DELETE_NA = 0x10
DELETE_ON = 0x20
REVERT_NA = 0x40
REVERT_ON = 0x80
ADMIN_NA = 0x100
ADMIN_ON = 0x200

ACL = [
    # Entry, rightsdict
    #+WikiAdmin:read,write,delete,revert,admin
    ('WikiAdmin', READ_ON | WRITE_ON | DELETE_ON | REVERT_ON | ADMIN_ON),
    # Example of a modifier
    # +EditorsGroup:read,write,delete,revert
    ('EditorsGroup', READ_ON | WRITE_ON | DELETE_ON | REVERT_ON | ADMIN_NA),
    ('All', READ_ON),
##    ('WikiAdmin', {'read': 1, 'write': 1, 'delete': 1, 'revert': 1, 'admin': 1}),
##    ('EditorsGroup', {'read': 1, 'write': 1, 'delete': 1, 'revert': 1, 'admin': 0}),
##    ('All', {'read': 1, 'write': 0, 'delete': 0, 'revert': 0, 'admin': 0}),
    ]


def set_cache(key, value):
    """ Pickle value into the file cache/key.

    Serializes first so a pickling error cannot leave a truncated
    cache file behind.
    """
    # Use the module-level PICKLE_PROTOCOL (with its pre-2.3
    # fallback) instead of referencing pickle.HIGHEST_PROTOCOL,
    # which does not exist on old pickle modules.
    data = pickle.dumps(value, PICKLE_PROTOCOL)
    # Bind f before the try block: in the original, a failed open
    # raised NameError from f.close() in the finally clause,
    # masking the real error.  Binary mode — pickles are binary.
    f = open(os.path.join('cache', key), 'wb')
    try:
        f.write(data)
    finally:
        f.close()


def get_cache(key):
    """ Load and unpickle the value stored in the file cache/key. """
    # Bind f before the try block: in the original, a failed open
    # raised NameError from f.close() in the finally clause,
    # masking the real error.  Binary mode — pickles are binary.
    f = open(os.path.join('cache', key), 'rb')
    try:
        data = f.read()
    finally:
        f.close()
    return pickle.loads(data)

  
def make_pages(count=1000):
    """ Make page directories

    Assume use of hierarchical acl; only 20% of pages have an acl.
    Each page gets a pages/<name>/cache/meta pickle with 'acl',
    'exists' and 'revision' keys.
    """
    clean()

    os.mkdir('pages')
    os.mkdir('cache')

    for i in range(count):
        # create random names to remove the effect of disk caching
        name = '%s.%s' % (time.time(), random.randint(1, sys.maxint))
        path = os.path.join('pages', name, 'cache')
        os.makedirs(path)
        metafile = os.path.join(path, 'meta')
        meta = {}

        # About 20 percent of pages have acl
        if random.random() < 0.2:
            meta['acl'] = ACL
        else:
            meta['acl'] = None

        # About 5% of pages are deleted
        meta['exists'] = random.random() < 0.95

        # Page revision
        meta['revision'] = random.randint(1, 100)

        # Close the file explicitly instead of relying on refcounting,
        # write in binary mode, and use the portable PICKLE_PROTOCOL
        # constant rather than pickle.HIGHEST_PROTOCOL directly.
        f = open(metafile, 'wb')
        try:
            pickle.dump(meta, f, PICKLE_PROTOCOL)
        finally:
            f.close()


def make_meta_cache():
    """ Get meta data from all pages and cache in one file """
    start = time.time()
    names = [name for name in dircache.listdir('pages') 
             if not name.startswith('.')]
    meta = {}
    for name in names:
        path = os.path.join('pages', name, 'cache', 'meta')
        try:
            f = file(path)
            value = pickle.load(f)
        finally:
            f.close()
        meta[name] = (value['exists'], value['acl'])
    
    set_cache('meta', meta)
    print '    Create meta cache: %.8f' % (time.time() - start)
    
    
def get_meta():
    start = time.time()
    meta = get_cache('meta')
    print '    Get meta from cache: %.8f' % (time.time() - start)
    return meta


def set_meta(key, value):
    """ Update one entry in the cached meta mapping and write it back. """
    mapping = get_cache('meta')
    mapping[key] = value
    set_cache('meta', mapping)


def edit_meta():
    start = time.time()
    # Example: page deleted, use old revision acl
    set_meta('%016d' % 1, (0, ACL))
    print '    Edit acl cache: %.8f' % (time.time() - start)


def clean():
    """ Remove the pages and cache trees if they exist.

    Uses shutil.rmtree instead of shelling out to 'rm -rf', so the
    script also works on platforms without a POSIX shell and does
    not silently ignore a missing rm binary.
    """
    for tree in ('pages', 'cache'):
        # ignore_errors keeps the original best-effort semantics
        # (rm -rf never failed on a missing directory either).
        shutil.rmtree(tree, ignore_errors=True)


if __name__ == '__main__':
    
    try:
        count = int(sys.argv[1])
        print 'Test cache for %d pages:' % count
        make_pages(count)
        make_meta_cache()
        get_meta()
        edit_meta()            
        clean()
    except (IndexError, ValueError):
        print __doc__
    
    