summaryrefslogtreecommitdiff
path: root/sqlparse
diff options
context:
space:
mode:
authorJesús Leganés Combarro "Piranna" <piranna@gmail.com>2012-05-23 20:59:25 +0200
committerJesús Leganés Combarro "Piranna" <piranna@gmail.com>2012-05-23 20:59:25 +0200
commit7728323fb24d825a47ff29d85d9a64667de7e47f (patch)
tree4745ce793d7b7ed78e6eff90d2c0d489ed5e5bf3 /sqlparse
parent9f717a9b72124231e561fcd190eafeb021c22bdd (diff)
parent26986faad381f72078921d15f777024ff88fd15c (diff)
downloadsqlparse-7728323fb24d825a47ff29d85d9a64667de7e47f.tar.gz
Merge branch 'master' into milestone_0.1.5
Diffstat (limited to 'sqlparse')
-rw-r--r--sqlparse/engine/__init__.py2
-rw-r--r--sqlparse/utils.py64
2 files changed, 56 insertions, 10 deletions
diff --git a/sqlparse/engine/__init__.py b/sqlparse/engine/__init__.py
index c30b6ca..3e2822b 100644
--- a/sqlparse/engine/__init__.py
+++ b/sqlparse/engine/__init__.py
@@ -61,8 +61,6 @@ class FilterStack(object):
def _run1(stream):
ret = []
for stmt in stream:
- for i in stmt.flatten():
- print repr(i)
for filter_ in self.stmtprocess:
filter_.process(self, stmt)
ret.append(stmt)
diff --git a/sqlparse/utils.py b/sqlparse/utils.py
index 6321353..cdf27b1 100644
--- a/sqlparse/utils.py
+++ b/sqlparse/utils.py
@@ -4,6 +4,61 @@ Created on 17/05/2012
@author: piranna
'''
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+
+if OrderedDict:
+ class Cache(OrderedDict):
+ """Cache with LRU algorithm using an OrderedDict as its basis
+ """
+ def __init__(self, maxsize=100):
+ OrderedDict.__init__(self)
+
+ self._maxsize = maxsize
+
+ def __getitem__(self, key, *args, **kwargs):
+ # Get the value and remove the key from the cache, or raise KeyError
+ value = OrderedDict.__getitem__(self, key)
+ del self[key]
+
+ # Insert the (key, value) pair on the front of the cache
+ OrderedDict.__setitem__(self, key, value)
+
+ # Return the value from the cache
+ return value
+
+ def __setitem__(self, key, value, *args, **kwargs):
+ # Key was inserted before, remove it so we put it at front later
+ if key in self:
+ del self[key]
+
+ # Too many items in the cache, remove the least recently used
+ elif len(self) >= self._maxsize:
+ self.popitem(False)
+
+ # Insert the (key, value) pair on the front of the cache
+ OrderedDict.__setitem__(self, key, value, *args, **kwargs)
+
+else:
+ class Cache(dict):
+ """Cache that resets when it gets full
+ """
+ def __init__(self, maxsize=100):
+ dict.__init__(self)
+
+ self._maxsize = maxsize
+
+ def __setitem__(self, key, value, *args, **kwargs):
+ # Reset the cache if we have too many cached entries and start over
+ if len(self) >= self._maxsize:
+ self.clear()
+
+ # Insert the (key, value) pair on the front of the cache
+ dict.__setitem__(self, key, value, *args, **kwargs)
+
def memoize_generator(func):
"""Memoize decorator for generators
@@ -13,7 +68,7 @@ def memoize_generator(func):
Obviously, this is only useful if the generator will always return the same
values for each specific set of parameters...
"""
- cache = {}
+ cache = Cache()
def wrapped_func(*args, **kwargs):
# params = (args, kwargs)
@@ -21,17 +76,10 @@ def memoize_generator(func):
# Look if cached
try:
- print params
cached = cache[params]
# Not cached, exec and store it
except KeyError:
- # Reset the cache if we have too much cached entries and start over
- # In the future would be better to use an OrderedDict and drop the
- # Least Recent Used entries
- if len(cache) >= 10:
- cache.clear()
-
cached = []
for item in func(*args, **kwargs):