summaryrefslogtreecommitdiffstats
path: root/webapp/django/core
diff options
context:
space:
mode:
Diffstat (limited to 'webapp/django/core')
-rw-r--r--webapp/django/core/__init__.py0
-rw-r--r--webapp/django/core/cache/__init__.py64
-rw-r--r--webapp/django/core/cache/backends/__init__.py0
-rw-r--r--webapp/django/core/cache/backends/base.py75
-rw-r--r--webapp/django/core/cache/backends/db.py89
-rw-r--r--webapp/django/core/cache/backends/dummy.py25
-rw-r--r--webapp/django/core/cache/backends/filebased.py152
-rw-r--r--webapp/django/core/cache/backends/locmem.py129
-rw-r--r--webapp/django/core/cache/backends/memcached.py47
-rw-r--r--webapp/django/core/context_processors.py85
-rw-r--r--webapp/django/core/exceptions.py34
-rw-r--r--webapp/django/core/files/__init__.py1
-rw-r--r--webapp/django/core/files/base.py169
-rw-r--r--webapp/django/core/files/images.py42
-rw-r--r--webapp/django/core/files/locks.py70
-rw-r--r--webapp/django/core/files/move.py88
-rw-r--r--webapp/django/core/files/storage.py224
-rw-r--r--webapp/django/core/files/temp.py62
-rw-r--r--webapp/django/core/files/uploadedfile.py156
-rw-r--r--webapp/django/core/files/uploadhandler.py213
-rw-r--r--webapp/django/core/handlers/__init__.py0
-rw-r--r--webapp/django/core/handlers/base.py200
-rw-r--r--webapp/django/core/handlers/modpython.py210
-rw-r--r--webapp/django/core/handlers/profiler-hotshot.py22
-rw-r--r--webapp/django/core/handlers/wsgi.py238
-rw-r--r--webapp/django/core/mail.py371
-rw-r--r--webapp/django/core/management/__init__.py334
-rw-r--r--webapp/django/core/management/base.py242
-rw-r--r--webapp/django/core/management/color.py41
-rw-r--r--webapp/django/core/management/commands/__init__.py0
-rw-r--r--webapp/django/core/management/commands/adminindex.py34
-rw-r--r--webapp/django/core/management/commands/cleanup.py11
-rw-r--r--webapp/django/core/management/commands/compilemessages.py57
-rw-r--r--webapp/django/core/management/commands/createcachetable.py42
-rw-r--r--webapp/django/core/management/commands/dbshell.py10
-rw-r--r--webapp/django/core/management/commands/diffsettings.py32
-rw-r--r--webapp/django/core/management/commands/dumpdata.py52
-rw-r--r--webapp/django/core/management/commands/flush.py70
-rw-r--r--webapp/django/core/management/commands/inspectdb.py125
-rw-r--r--webapp/django/core/management/commands/loaddata.py181
-rw-r--r--webapp/django/core/management/commands/makemessages.py205
-rw-r--r--webapp/django/core/management/commands/reset.py52
-rw-r--r--webapp/django/core/management/commands/runfcgi.py20
-rw-r--r--webapp/django/core/management/commands/runserver.py77
-rw-r--r--webapp/django/core/management/commands/shell.py59
-rw-r--r--webapp/django/core/management/commands/sql.py10
-rw-r--r--webapp/django/core/management/commands/sqlall.py10
-rw-r--r--webapp/django/core/management/commands/sqlclear.py10
-rw-r--r--webapp/django/core/management/commands/sqlcustom.py10
-rw-r--r--webapp/django/core/management/commands/sqlflush.py10
-rw-r--r--webapp/django/core/management/commands/sqlindexes.py10
-rw-r--r--webapp/django/core/management/commands/sqlinitialdata.py7
-rw-r--r--webapp/django/core/management/commands/sqlreset.py10
-rw-r--r--webapp/django/core/management/commands/sqlsequencereset.py9
-rw-r--r--webapp/django/core/management/commands/startapp.py46
-rw-r--r--webapp/django/core/management/commands/startproject.py38
-rw-r--r--webapp/django/core/management/commands/syncdb.py152
-rw-r--r--webapp/django/core/management/commands/test.py35
-rw-r--r--webapp/django/core/management/commands/testserver.py36
-rw-r--r--webapp/django/core/management/commands/validate.py9
-rw-r--r--webapp/django/core/management/sql.py205
-rw-r--r--webapp/django/core/management/validation.py221
-rw-r--r--webapp/django/core/paginator.py120
-rw-r--r--webapp/django/core/serializers/__init__.py112
-rw-r--r--webapp/django/core/serializers/base.py175
-rw-r--r--webapp/django/core/serializers/json.py68
-rw-r--r--webapp/django/core/serializers/python.py108
-rw-r--r--webapp/django/core/serializers/pyyaml.py51
-rw-r--r--webapp/django/core/serializers/xml_serializer.py238
-rw-r--r--webapp/django/core/servers/__init__.py0
-rw-r--r--webapp/django/core/servers/basehttp.py665
-rw-r--r--webapp/django/core/servers/fastcgi.py179
-rw-r--r--webapp/django/core/signals.py5
-rw-r--r--webapp/django/core/template_loader.py7
-rw-r--r--webapp/django/core/urlresolvers.py330
-rw-r--r--webapp/django/core/validators.py598
-rw-r--r--webapp/django/core/xheaders.py24
77 files changed, 7918 insertions, 0 deletions
diff --git a/webapp/django/core/__init__.py b/webapp/django/core/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/webapp/django/core/__init__.py
diff --git a/webapp/django/core/cache/__init__.py b/webapp/django/core/cache/__init__.py
new file mode 100644
index 0000000000..93e7adb76e
--- /dev/null
+++ b/webapp/django/core/cache/__init__.py
@@ -0,0 +1,64 @@
+"""
+Caching framework.
+
+This package defines a set of cache backends that all conform to a simple API.
+In a nutshell, a cache is a set of values -- which can be any object that
+may be pickled -- identified by string keys. For the complete API, see
+the abstract BaseCache class in django.core.cache.backends.base.
+
+Client code should not access a cache backend directly; instead it should
+either use the "cache" variable made available here, or it should use the
+get_cache() function made available here. get_cache() takes a backend URI
+(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
+cache class.
+
+See docs/cache.txt for information on the public API.
+"""
+
+from cgi import parse_qsl
+from django.conf import settings
+from django.core import signals
+from django.core.cache.backends.base import InvalidCacheBackendError
+
+# Name for use in settings file --> name of module in "backends" directory.
+# Any backend scheme that is not in this dictionary is treated as a Python
+# import path to a custom backend.
+BACKENDS = {
+ 'memcached': 'memcached',
+ 'locmem': 'locmem',
+ 'file': 'filebased',
+ 'db': 'db',
+ 'dummy': 'dummy',
+}
+
+def get_cache(backend_uri):
+ if backend_uri.find(':') == -1:
+ raise InvalidCacheBackendError, "Backend URI must start with scheme://"
+ scheme, rest = backend_uri.split(':', 1)
+ if not rest.startswith('//'):
+ raise InvalidCacheBackendError, "Backend URI must start with scheme://"
+
+ host = rest[2:]
+ qpos = rest.find('?')
+ if qpos != -1:
+ params = dict(parse_qsl(rest[qpos+1:]))
+ host = rest[2:qpos]
+ else:
+ params = {}
+ if host.endswith('/'):
+ host = host[:-1]
+
+ if scheme in BACKENDS:
+ module = __import__('django.core.cache.backends.%s' % BACKENDS[scheme], {}, {}, [''])
+ else:
+ module = __import__(scheme, {}, {}, [''])
+ return getattr(module, 'CacheClass')(host, params)
+
+cache = get_cache(settings.CACHE_BACKEND)
+
+# Some caches -- python-memcached in particular -- need to do a cleanup at the
+# end of a request cycle. If the cache provides a close() method, wire it up
+# here.
+if hasattr(cache, 'close'):
+ signals.request_finished.connect(cache.close)
+
diff --git a/webapp/django/core/cache/backends/__init__.py b/webapp/django/core/cache/backends/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/webapp/django/core/cache/backends/__init__.py
diff --git a/webapp/django/core/cache/backends/base.py b/webapp/django/core/cache/backends/base.py
new file mode 100644
index 0000000000..cb5fe06045
--- /dev/null
+++ b/webapp/django/core/cache/backends/base.py
@@ -0,0 +1,75 @@
+"Base Cache class."
+
+from django.core.exceptions import ImproperlyConfigured
+
+class InvalidCacheBackendError(ImproperlyConfigured):
+ pass
+
+class BaseCache(object):
+ def __init__(self, params):
+ timeout = params.get('timeout', 300)
+ try:
+ timeout = int(timeout)
+ except (ValueError, TypeError):
+ timeout = 300
+ self.default_timeout = timeout
+
+ def add(self, key, value, timeout=None):
+ """
+ Set a value in the cache if the key does not already exist. If
+ timeout is given, that timeout will be used for the key; otherwise
+ the default cache timeout will be used.
+
+ Returns True if the value was stored, False otherwise.
+ """
+ raise NotImplementedError
+
+ def get(self, key, default=None):
+ """
+ Fetch a given key from the cache. If the key does not exist, return
+ default, which itself defaults to None.
+ """
+ raise NotImplementedError
+
+ def set(self, key, value, timeout=None):
+ """
+ Set a value in the cache. If timeout is given, that timeout will be
+ used for the key; otherwise the default cache timeout will be used.
+ """
+ raise NotImplementedError
+
+ def delete(self, key):
+ """
+ Delete a key from the cache, failing silently.
+ """
+ raise NotImplementedError
+
+ def get_many(self, keys):
+ """
+ Fetch a bunch of keys from the cache. For certain backends (memcached,
+ pgsql) this can be *much* faster when fetching multiple values.
+
+ Returns a dict mapping each key in keys to its value. If the given
+ key is missing, it will be missing from the response dict.
+ """
+ d = {}
+ for k in keys:
+ val = self.get(k)
+ if val is not None:
+ d[k] = val
+ return d
+
+ def has_key(self, key):
+ """
+ Returns True if the key is in the cache and has not expired.
+ """
+ return self.get(key) is not None
+
+ def __contains__(self, key):
+ """
+ Returns True if the key is in the cache and has not expired.
+ """
+ # This is a separate method, rather than just a copy of has_key(),
+ # so that it always has the same functionality as has_key(), even
+ # if a subclass overrides it.
+ return self.has_key(key)
diff --git a/webapp/django/core/cache/backends/db.py b/webapp/django/core/cache/backends/db.py
new file mode 100644
index 0000000000..d2b422af83
--- /dev/null
+++ b/webapp/django/core/cache/backends/db.py
@@ -0,0 +1,89 @@
+"Database cache backend."
+
+from django.core.cache.backends.base import BaseCache
+from django.db import connection, transaction, DatabaseError
+import base64, time
+from datetime import datetime
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+class CacheClass(BaseCache):
+ def __init__(self, table, params):
+ BaseCache.__init__(self, params)
+ self._table = table
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ def get(self, key, default=None):
+ cursor = connection.cursor()
+ cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
+ row = cursor.fetchone()
+ if row is None:
+ return default
+ now = datetime.now()
+ if row[2] < now:
+ cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
+ transaction.commit_unless_managed()
+ return default
+ return pickle.loads(base64.decodestring(row[1]))
+
+ def set(self, key, value, timeout=None):
+ self._base_set('set', key, value, timeout)
+
+ def add(self, key, value, timeout=None):
+ return self._base_set('add', key, value, timeout)
+
+ def _base_set(self, mode, key, value, timeout=None):
+ if timeout is None:
+ timeout = self.default_timeout
+ cursor = connection.cursor()
+ cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+ num = cursor.fetchone()[0]
+ now = datetime.now().replace(microsecond=0)
+ exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
+ if num > self._max_entries:
+ self._cull(cursor, now)
+ encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
+ cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s" % self._table, [key])
+ try:
+ if mode == 'set' and cursor.fetchone():
+ cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table, [encoded, str(exp), key])
+ else:
+ cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table, [key, encoded, str(exp)])
+ except DatabaseError:
+ # To be threadsafe, updates/inserts are allowed to fail silently
+ return False
+ else:
+ transaction.commit_unless_managed()
+ return True
+
+ def delete(self, key):
+ cursor = connection.cursor()
+ cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
+ transaction.commit_unless_managed()
+
+ def has_key(self, key):
+ cursor = connection.cursor()
+ cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s" % self._table, [key])
+ return cursor.fetchone() is not None
+
+ def _cull(self, cursor, now):
+ if self._cull_frequency == 0:
+ cursor.execute("DELETE FROM %s" % self._table)
+ else:
+ cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table, [str(now)])
+ cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+ num = cursor.fetchone()[0]
+ if num > self._max_entries:
+ cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
+ cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
diff --git a/webapp/django/core/cache/backends/dummy.py b/webapp/django/core/cache/backends/dummy.py
new file mode 100644
index 0000000000..e479703f75
--- /dev/null
+++ b/webapp/django/core/cache/backends/dummy.py
@@ -0,0 +1,25 @@
+"Dummy cache backend"
+
+from django.core.cache.backends.base import BaseCache
+
+class CacheClass(BaseCache):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def add(self, *args, **kwargs):
+ return True
+
+ def get(self, key, default=None):
+ return default
+
+ def set(self, *args, **kwargs):
+ pass
+
+ def delete(self, *args, **kwargs):
+ pass
+
+ def get_many(self, *args, **kwargs):
+ return {}
+
+ def has_key(self, *args, **kwargs):
+ return False
diff --git a/webapp/django/core/cache/backends/filebased.py b/webapp/django/core/cache/backends/filebased.py
new file mode 100644
index 0000000000..181197a8d7
--- /dev/null
+++ b/webapp/django/core/cache/backends/filebased.py
@@ -0,0 +1,152 @@
+"File-based cache backend"
+
+import os
+import time
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.core.cache.backends.base import BaseCache
+from django.utils.hashcompat import md5_constructor
+
+class CacheClass(BaseCache):
+ def __init__(self, dir, params):
+ BaseCache.__init__(self, params)
+
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ self._dir = dir
+ if not os.path.exists(self._dir):
+ self._createdir()
+
+ def add(self, key, value, timeout=None):
+ if self.has_key(key):
+ return False
+
+ self.set(key, value, timeout)
+ return True
+
+ def get(self, key, default=None):
+ fname = self._key_to_file(key)
+ try:
+ f = open(fname, 'rb')
+ exp = pickle.load(f)
+ now = time.time()
+ if exp < now:
+ f.close()
+ self._delete(fname)
+ else:
+ return pickle.load(f)
+ except (IOError, OSError, EOFError, pickle.PickleError):
+ pass
+ return default
+
+ def set(self, key, value, timeout=None):
+ fname = self._key_to_file(key)
+ dirname = os.path.dirname(fname)
+
+ if timeout is None:
+ timeout = self.default_timeout
+
+ self._cull()
+
+ try:
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ f = open(fname, 'wb')
+ now = time.time()
+ pickle.dump(now + timeout, f, pickle.HIGHEST_PROTOCOL)
+ pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
+ except (IOError, OSError):
+ pass
+
+ def delete(self, key):
+ try:
+ self._delete(self._key_to_file(key))
+ except (IOError, OSError):
+ pass
+
+ def _delete(self, fname):
+ os.remove(fname)
+ try:
+ # Remove the 2 subdirs if they're empty
+ dirname = os.path.dirname(fname)
+ os.rmdir(dirname)
+ os.rmdir(os.path.dirname(dirname))
+ except (IOError, OSError):
+ pass
+
+ def has_key(self, key):
+ fname = self._key_to_file(key)
+ try:
+ f = open(fname, 'rb')
+ exp = pickle.load(f)
+ now = time.time()
+ if exp < now:
+ f.close()
+ self._delete(fname)
+ return False
+ else:
+ return True
+ except (IOError, OSError, EOFError, pickle.PickleError):
+ return False
+
+ def _cull(self):
+ if int(self._num_entries) < self._max_entries:
+ return
+
+ try:
+ filelist = os.listdir(self._dir)
+ except (IOError, OSError):
+ return
+
+ if self._cull_frequency == 0:
+ doomed = filelist
+ else:
+ doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]
+
+ for topdir in doomed:
+ try:
+ for root, _, files in os.walk(topdir):
+ for f in files:
+ self._delete(os.path.join(root, f))
+ except (IOError, OSError):
+ pass
+
+ def _createdir(self):
+ try:
+ os.makedirs(self._dir)
+ except OSError:
+ raise EnvironmentError, "Cache directory '%s' does not exist and could not be created'" % self._dir
+
+ def _key_to_file(self, key):
+ """
+ Convert the filename into an md5 string. We'll turn the first couple
+ bits of the path into directory prefixes to be nice to filesystems
+ that have problems with large numbers of files in a directory.
+
+    Thus, a cache key of "foo" gets turned into a file named
+ ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
+ """
+ path = md5_constructor(key.encode('utf-8')).hexdigest()
+ path = os.path.join(path[:2], path[2:4], path[4:])
+ return os.path.join(self._dir, path)
+
+ def _get_num_entries(self):
+ count = 0
+ for _,_,files in os.walk(self._dir):
+ count += len(files)
+ return count
+ _num_entries = property(_get_num_entries)
diff --git a/webapp/django/core/cache/backends/locmem.py b/webapp/django/core/cache/backends/locmem.py
new file mode 100644
index 0000000000..15a169dc37
--- /dev/null
+++ b/webapp/django/core/cache/backends/locmem.py
@@ -0,0 +1,129 @@
+"Thread-safe in-memory cache backend."
+
+import time
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.core.cache.backends.base import BaseCache
+from django.utils.synch import RWLock
+
+class CacheClass(BaseCache):
+ def __init__(self, _, params):
+ BaseCache.__init__(self, params)
+ self._cache = {}
+ self._expire_info = {}
+
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ self._lock = RWLock()
+
+ def add(self, key, value, timeout=None):
+ self._lock.writer_enters()
+ try:
+ exp = self._expire_info.get(key)
+ if exp is None or exp <= time.time():
+ try:
+ self._set(key, pickle.dumps(value), timeout)
+ return True
+ except pickle.PickleError:
+ pass
+ return False
+ finally:
+ self._lock.writer_leaves()
+
+ def get(self, key, default=None):
+ self._lock.reader_enters()
+ try:
+ exp = self._expire_info.get(key)
+ if exp is None:
+ return default
+ elif exp > time.time():
+ try:
+ return pickle.loads(self._cache[key])
+ except pickle.PickleError:
+ return default
+ finally:
+ self._lock.reader_leaves()
+ self._lock.writer_enters()
+ try:
+ del self._cache[key]
+ del self._expire_info[key]
+ return default
+ finally:
+ self._lock.writer_leaves()
+
+ def _set(self, key, value, timeout=None):
+ if len(self._cache) >= self._max_entries:
+ self._cull()
+ if timeout is None:
+ timeout = self.default_timeout
+ self._cache[key] = value
+ self._expire_info[key] = time.time() + timeout
+
+ def set(self, key, value, timeout=None):
+ self._lock.writer_enters()
+ # Python 2.3 and 2.4 don't allow combined try-except-finally blocks.
+ try:
+ try:
+ self._set(key, pickle.dumps(value), timeout)
+ except pickle.PickleError:
+ pass
+ finally:
+ self._lock.writer_leaves()
+
+ def has_key(self, key):
+ self._lock.reader_enters()
+ try:
+ exp = self._expire_info.get(key)
+ if exp is None:
+ return False
+ elif exp > time.time():
+ return True
+ finally:
+ self._lock.reader_leaves()
+
+ self._lock.writer_enters()
+ try:
+ del self._cache[key]
+ del self._expire_info[key]
+ return False
+ finally:
+ self._lock.writer_leaves()
+
+ def _cull(self):
+ if self._cull_frequency == 0:
+ self._cache.clear()
+ self._expire_info.clear()
+ else:
+ doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
+ for k in doomed:
+ self._delete(k)
+
+ def _delete(self, key):
+ try:
+ del self._cache[key]
+ except KeyError:
+ pass
+ try:
+ del self._expire_info[key]
+ except KeyError:
+ pass
+
+ def delete(self, key):
+ self._lock.writer_enters()
+ try:
+ self._delete(key)
+ finally:
+ self._lock.writer_leaves()
diff --git a/webapp/django/core/cache/backends/memcached.py b/webapp/django/core/cache/backends/memcached.py
new file mode 100644
index 0000000000..beb8844ec1
--- /dev/null
+++ b/webapp/django/core/cache/backends/memcached.py
@@ -0,0 +1,47 @@
+"Memcached cache backend"
+
+from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
+from django.utils.encoding import smart_unicode, smart_str
+
+try:
+ import cmemcache as memcache
+except ImportError:
+ try:
+ import memcache
+ except:
+ raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")
+
+class CacheClass(BaseCache):
+ def __init__(self, server, params):
+ BaseCache.__init__(self, params)
+ self._cache = memcache.Client(server.split(';'))
+
+ def add(self, key, value, timeout=0):
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ return self._cache.add(smart_str(key), value, timeout or self.default_timeout)
+
+ def get(self, key, default=None):
+ val = self._cache.get(smart_str(key))
+ if val is None:
+ return default
+ else:
+ if isinstance(val, basestring):
+ return smart_unicode(val)
+ else:
+ return val
+
+ def set(self, key, value, timeout=0):
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ self._cache.set(smart_str(key), value, timeout or self.default_timeout)
+
+ def delete(self, key):
+ self._cache.delete(smart_str(key))
+
+ def get_many(self, keys):
+ return self._cache.get_multi(map(smart_str,keys))
+
+ def close(self, **kwargs):
+ self._cache.disconnect_all()
+
diff --git a/webapp/django/core/context_processors.py b/webapp/django/core/context_processors.py
new file mode 100644
index 0000000000..cb07125ce7
--- /dev/null
+++ b/webapp/django/core/context_processors.py
@@ -0,0 +1,85 @@
+"""
+A set of request processors that return dictionaries to be merged into a
+template context. Each function takes the request object as its only parameter
+and returns a dictionary to add to the context.
+
+These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
+RequestContext.
+"""
+
+from django.conf import settings
+
+def auth(request):
+ """
+ Returns context variables required by apps that use Django's authentication
+ system.
+
+ If there is no 'user' attribute in the request, uses AnonymousUser (from
+ django.contrib.auth).
+ """
+ if hasattr(request, 'user'):
+ user = request.user
+ else:
+ from django.contrib.auth.models import AnonymousUser
+ user = AnonymousUser()
+ return {
+ 'user': user,
+ 'messages': user.get_and_delete_messages(),
+ 'perms': PermWrapper(user),
+ }
+
+def debug(request):
+ "Returns context variables helpful for debugging."
+ context_extras = {}
+ if settings.DEBUG and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
+ context_extras['debug'] = True
+ from django.db import connection
+ context_extras['sql_queries'] = connection.queries
+ return context_extras
+
+def i18n(request):
+ from django.utils import translation
+
+ context_extras = {}
+ context_extras['LANGUAGES'] = settings.LANGUAGES
+ context_extras['LANGUAGE_CODE'] = translation.get_language()
+ context_extras['LANGUAGE_BIDI'] = translation.get_language_bidi()
+
+ return context_extras
+
+def media(request):
+ """
+ Adds media-related context variables to the context.
+
+ """
+ return {'MEDIA_URL': settings.MEDIA_URL}
+
+def request(request):
+ return {'request': request}
+
+# PermWrapper and PermLookupDict proxy the permissions system into objects that
+# the template system can understand.
+
+class PermLookupDict(object):
+ def __init__(self, user, module_name):
+ self.user, self.module_name = user, module_name
+
+ def __repr__(self):
+ return str(self.user.get_all_permissions())
+
+ def __getitem__(self, perm_name):
+ return self.user.has_perm("%s.%s" % (self.module_name, perm_name))
+
+ def __nonzero__(self):
+ return self.user.has_module_perms(self.module_name)
+
+class PermWrapper(object):
+ def __init__(self, user):
+ self.user = user
+
+ def __getitem__(self, module_name):
+ return PermLookupDict(self.user, module_name)
+
+ def __iter__(self):
+ # I am large, I contain multitudes.
+ raise TypeError("PermWrapper is not iterable.")
diff --git a/webapp/django/core/exceptions.py b/webapp/django/core/exceptions.py
new file mode 100644
index 0000000000..e5df8caca8
--- /dev/null
+++ b/webapp/django/core/exceptions.py
@@ -0,0 +1,34 @@
+"Global Django exceptions"
+
+class ObjectDoesNotExist(Exception):
+ "The requested object does not exist"
+ silent_variable_failure = True
+
+class MultipleObjectsReturned(Exception):
+ "The query returned multiple objects when only one was expected."
+ pass
+
+class SuspiciousOperation(Exception):
+ "The user did something suspicious"
+ pass
+
+class PermissionDenied(Exception):
+ "The user did not have permission to do that"
+ pass
+
+class ViewDoesNotExist(Exception):
+ "The requested view does not exist"
+ pass
+
+class MiddlewareNotUsed(Exception):
+ "This middleware is not used in this server configuration"
+ pass
+
+class ImproperlyConfigured(Exception):
+ "Django is somehow improperly configured"
+ pass
+
+class FieldError(Exception):
+ """Some kind of problem with a model field."""
+ pass
+
diff --git a/webapp/django/core/files/__init__.py b/webapp/django/core/files/__init__.py
new file mode 100644
index 0000000000..0c3ef57af8
--- /dev/null
+++ b/webapp/django/core/files/__init__.py
@@ -0,0 +1 @@
+from django.core.files.base import File
diff --git a/webapp/django/core/files/base.py b/webapp/django/core/files/base.py
new file mode 100644
index 0000000000..69739d6488
--- /dev/null
+++ b/webapp/django/core/files/base.py
@@ -0,0 +1,169 @@
+import os
+
+from django.utils.encoding import smart_str, smart_unicode
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+class File(object):
+ DEFAULT_CHUNK_SIZE = 64 * 2**10
+
+ def __init__(self, file):
+ self.file = file
+ self._name = file.name
+ self._mode = file.mode
+ self._closed = False
+
+ def __str__(self):
+ return smart_str(self.name or '')
+
+ def __unicode__(self):
+ return smart_unicode(self.name or u'')
+
+ def __repr__(self):
+ return "<%s: %s>" % (self.__class__.__name__, self or "None")
+
+ def __nonzero__(self):
+ return not not self.name
+
+ def __len__(self):
+ return self.size
+
+ def _get_name(self):
+ return self._name
+ name = property(_get_name)
+
+ def _get_mode(self):
+ return self._mode
+ mode = property(_get_mode)
+
+ def _get_closed(self):
+ return self._closed
+ closed = property(_get_closed)
+
+ def _get_size(self):
+ if not hasattr(self, '_size'):
+ if hasattr(self.file, 'size'):
+ self._size = self.file.size
+ elif os.path.exists(self.file.name):
+ self._size = os.path.getsize(self.file.name)
+ else:
+ raise AttributeError("Unable to determine the file's size.")
+ return self._size
+
+ def _set_size(self, size):
+ self._size = size
+
+ size = property(_get_size, _set_size)
+
+ def chunks(self, chunk_size=None):
+ """
+        Read the file and yield chunks of ``chunk_size`` bytes (defaults to
+        ``File.DEFAULT_CHUNK_SIZE``).
+ """
+ if not chunk_size:
+ chunk_size = self.__class__.DEFAULT_CHUNK_SIZE
+
+ if hasattr(self, 'seek'):
+ self.seek(0)
+ # Assume the pointer is at zero...
+ counter = self.size
+
+ while counter > 0:
+ yield self.read(chunk_size)
+ counter -= chunk_size
+
+ def multiple_chunks(self, chunk_size=None):
+ """
+ Returns ``True`` if you can expect multiple chunks.
+
+ NB: If a particular file representation is in memory, subclasses should
+ always return ``False`` -- there's no good reason to read from memory in
+ chunks.
+ """
+ if not chunk_size:
+ chunk_size = self.DEFAULT_CHUNK_SIZE
+ return self.size > chunk_size
+
+ def xreadlines(self):
+ return iter(self)
+
+ def readlines(self):
+ return list(self.xreadlines())
+
+ def __iter__(self):
+ # Iterate over this file-like object by newlines
+ buffer_ = None
+ for chunk in self.chunks():
+ chunk_buffer = StringIO(chunk)
+
+ for line in chunk_buffer:
+ if buffer_:
+ line = buffer_ + line
+ buffer_ = None
+
+ # If this is the end of a line, yield
+ # otherwise, wait for the next round
+ if line[-1] in ('\n', '\r'):
+ yield line
+ else:
+ buffer_ = line
+
+ if buffer_ is not None:
+ yield buffer_
+
+ def open(self, mode=None):
+ if not self.closed:
+ self.seek(0)
+ elif os.path.exists(self.file.name):
+ self.file = open(self.file.name, mode or self.file.mode)
+ else:
+ raise ValueError("The file cannot be reopened.")
+
+ def seek(self, position):
+ self.file.seek(position)
+
+ def tell(self):
+ return self.file.tell()
+
+ def read(self, num_bytes=None):
+ if num_bytes is None:
+ return self.file.read()
+ return self.file.read(num_bytes)
+
+ def write(self, content):
+ if not self.mode.startswith('w'):
+ raise IOError("File was not opened with write access.")
+ self.file.write(content)
+
+ def flush(self):
+ if not self.mode.startswith('w'):
+ raise IOError("File was not opened with write access.")
+ self.file.flush()
+
+ def close(self):
+ self.file.close()
+ self._closed = True
+
+class ContentFile(File):
+ """
+ A File-like object that takes just raw content, rather than an actual file.
+ """
+ def __init__(self, content):
+ self.file = StringIO(content or '')
+ self.size = len(content or '')
+ self.file.seek(0)
+ self._closed = False
+
+ def __str__(self):
+ return 'Raw content'
+
+ def __nonzero__(self):
+ return True
+
+ def open(self, mode=None):
+ if self._closed:
+ self._closed = False
+ self.seek(0)
diff --git a/webapp/django/core/files/images.py b/webapp/django/core/files/images.py
new file mode 100644
index 0000000000..5ddcdd4322
--- /dev/null
+++ b/webapp/django/core/files/images.py
@@ -0,0 +1,42 @@
+"""
+Utility functions for handling images.
+
+Requires PIL, as you might imagine.
+"""
+
+from django.core.files import File
+
+class ImageFile(File):
+ """
+ A mixin for use alongside django.core.files.base.File, which provides
+ additional features for dealing with images.
+ """
+ def _get_width(self):
+ return self._get_image_dimensions()[0]
+ width = property(_get_width)
+
+ def _get_height(self):
+ return self._get_image_dimensions()[1]
+ height = property(_get_height)
+
+ def _get_image_dimensions(self):
+ if not hasattr(self, '_dimensions_cache'):
+ self._dimensions_cache = get_image_dimensions(self)
+ return self._dimensions_cache
+
+def get_image_dimensions(file_or_path):
+ """Returns the (width, height) of an image, given an open file or a path."""
+ from PIL import ImageFile as PILImageFile
+ p = PILImageFile.Parser()
+ if hasattr(file_or_path, 'read'):
+ file = file_or_path
+ else:
+ file = open(file_or_path, 'rb')
+ while 1:
+ data = file.read(1024)
+ if not data:
+ break
+ p.feed(data)
+ if p.image:
+ return p.image.size
+ return None
diff --git a/webapp/django/core/files/locks.py b/webapp/django/core/files/locks.py
new file mode 100644
index 0000000000..98a11551a7
--- /dev/null
+++ b/webapp/django/core/files/locks.py
@@ -0,0 +1,70 @@
+"""
+Portable file locking utilities.
+
+Based partially on example by Jonathan Feignberg <jdf@pobox.com> in the Python
+Cookbook, licensed under the Python Software License.
+
+ http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203
+
+Example Usage::
+
+ >>> from django.core.files import locks
+ >>> f = open('./file', 'wb')
+ >>> locks.lock(f, locks.LOCK_EX)
+ >>> f.write('Django')
+ >>> f.close()
+"""
+
+__all__ = ('LOCK_EX','LOCK_SH','LOCK_NB','lock','unlock')
+
+system_type = None
+
+try:
+ import win32con
+ import win32file
+ import pywintypes
+ LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
+ LOCK_SH = 0
+ LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY
+ __overlapped = pywintypes.OVERLAPPED()
+ system_type = 'nt'
+except (ImportError, AttributeError):
+ pass
+
+try:
+ import fcntl
+ LOCK_EX = fcntl.LOCK_EX
+ LOCK_SH = fcntl.LOCK_SH
+ LOCK_NB = fcntl.LOCK_NB
+ system_type = 'posix'
+except (ImportError, AttributeError):
+ pass
+
+def fd(f):
+    """Get a file descriptor from something which could be a file or an fd."""
+ return hasattr(f, 'fileno') and f.fileno() or f
+
+if system_type == 'nt':
+ def lock(file, flags):
+ hfile = win32file._get_osfhandle(fd(file))
+ win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped)
+
+ def unlock(file):
+ hfile = win32file._get_osfhandle(fd(file))
+ win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped)
+elif system_type == 'posix':
+ def lock(file, flags):
+ fcntl.flock(fd(file), flags)
+
+ def unlock(file):
+ fcntl.flock(fd(file), fcntl.LOCK_UN)
+else:
+ # File locking is not supported.
+ LOCK_EX = LOCK_SH = LOCK_NB = None
+
+ # Dummy functions that don't do anything.
+ def lock(file, flags):
+ pass
+
+ def unlock(file):
+ pass
diff --git a/webapp/django/core/files/move.py b/webapp/django/core/files/move.py
new file mode 100644
index 0000000000..58a0ab125d
--- /dev/null
+++ b/webapp/django/core/files/move.py
@@ -0,0 +1,88 @@
+"""
+Move a file in the safest way possible::
+
+    >>> from django.core.files.move import file_move_safe
+    >>> file_move_safe("/tmp/old_file", "/tmp/new_file")
+"""
+
+import os
+from django.core.files import locks
+
+try:
+ from shutil import copystat
+except ImportError:
+ def copystat(src, dst):
+ """Copy all stat info (mode bits, atime and mtime) from src to dst"""
+ st = os.stat(src)
+ mode = stat.S_IMODE(st.st_mode)
+ if hasattr(os, 'utime'):
+ os.utime(dst, (st.st_atime, st.st_mtime))
+ if hasattr(os, 'chmod'):
+ os.chmod(dst, mode)
+
+__all__ = ['file_move_safe']
+
+def _samefile(src, dst):
+ # Macintosh, Unix.
+ if hasattr(os.path,'samefile'):
+ try:
+ return os.path.samefile(src, dst)
+ except OSError:
+ return False
+
+ # All other platforms: check for same pathname.
+ return (os.path.normcase(os.path.abspath(src)) ==
+ os.path.normcase(os.path.abspath(dst)))
+
+def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
+ """
+ Moves a file from one location to another in the safest way possible.
+
+    First, tries ``os.rename``, which is simple but will break when moving
+    across filesystems. If that fails, the contents are streamed manually
+    from one file to another in pure Python, so the move also works across
+    filesystems.
+
+ If the destination file exists and ``allow_overwrite`` is ``False``, this
+ function will throw an ``IOError``.
+ """
+
+ # There's no reason to move if we don't have to.
+ if _samefile(old_file_name, new_file_name):
+ return
+
+ try:
+ os.rename(old_file_name, new_file_name)
+ return
+ except OSError:
+ # This will happen with os.rename if moving to another filesystem
+ # or when moving opened files on certain operating systems
+ pass
+
+ # first open the old file, so that it won't go away
+ old_file = open(old_file_name, 'rb')
+ try:
+ # now open the new file, not forgetting allow_overwrite
+ fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
+ (not allow_overwrite and os.O_EXCL or 0))
+ try:
+ locks.lock(fd, locks.LOCK_EX)
+ current_chunk = None
+ while current_chunk != '':
+ current_chunk = old_file.read(chunk_size)
+ os.write(fd, current_chunk)
+ finally:
+ locks.unlock(fd)
+ os.close(fd)
+ finally:
+ old_file.close()
+ copystat(old_file_name, new_file_name)
+
+ try:
+ os.remove(old_file_name)
+ except OSError, e:
+ # Certain operating systems (Cygwin and Windows)
+ # fail when deleting opened files, ignore it
+ if getattr(e, 'winerror', 0) != 32:
+ # FIXME: should we also ignore errno 13?
+ raise
diff --git a/webapp/django/core/files/storage.py b/webapp/django/core/files/storage.py
new file mode 100644
index 0000000000..30d9be9f00
--- /dev/null
+++ b/webapp/django/core/files/storage.py
@@ -0,0 +1,224 @@
+import os
+import urlparse
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
+from django.utils.encoding import force_unicode
+from django.utils.text import get_valid_filename
+from django.utils._os import safe_join
+from django.core.files import locks, File
+from django.core.files.move import file_move_safe
+
+__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
+
+class Storage(object):
+ """
+ A base storage class, providing some default behaviors that all other
+ storage systems can inherit or override, as necessary.
+ """
+
+ # The following methods represent a public interface to private methods.
+ # These shouldn't be overridden by subclasses unless absolutely necessary.
+
+ def open(self, name, mode='rb', mixin=None):
+ """
+ Retrieves the specified file from storage, using the optional mixin
+ class to customize what features are available on the File returned.
+ """
+ file = self._open(name, mode)
+ if mixin:
+ # Add the mixin as a parent class of the File returned from storage.
+ file.__class__ = type(mixin.__name__, (mixin, file.__class__), {})
+ return file
+
+ def save(self, name, content):
+ """
+ Saves new content to the file specified by name. The content should be a
+ proper File object, ready to be read from the beginning.
+ """
+ # Get the proper name for the file, as it will actually be saved.
+ if name is None:
+ name = content.name
+
+ name = self.get_available_name(name)
+ name = self._save(name, content)
+
+ # Store filenames with forward slashes, even on Windows
+ return force_unicode(name.replace('\\', '/'))
+
+ # These methods are part of the public API, with default implementations.
+
+ def get_valid_name(self, name):
+ """
+ Returns a filename, based on the provided filename, that's suitable for
+ use in the target storage system.
+ """
+ return get_valid_filename(name)
+
+ def get_available_name(self, name):
+ """
+ Returns a filename that's free on the target storage system, and
+ available for new content to be written to.
+ """
+ # If the filename already exists, keep adding an underscore to the name
+ # of the file until the filename doesn't exist.
+ while self.exists(name):
+ try:
+ dot_index = name.rindex('.')
+ except ValueError: # filename has no dot
+ name += '_'
+ else:
+ name = name[:dot_index] + '_' + name[dot_index:]
+ return name
+
+ def path(self, name):
+ """
+ Returns a local filesystem path where the file can be retrieved using
+ Python's built-in open() function. Storage systems that can't be
+ accessed using open() should *not* implement this method.
+ """
+ raise NotImplementedError("This backend doesn't support absolute paths.")
+
+ # The following methods form the public API for storage systems, but with
+ # no default implementations. Subclasses must implement *all* of these.
+
+ def delete(self, name):
+ """
+ Deletes the specified file from the storage system.
+ """
+ raise NotImplementedError()
+
+ def exists(self, name):
+ """
+        Returns True if a file referenced by the given name already exists in the
+ storage system, or False if the name is available for a new file.
+ """
+ raise NotImplementedError()
+
+ def listdir(self, path):
+ """
+ Lists the contents of the specified path, returning a 2-tuple of lists;
+ the first item being directories, the second item being files.
+ """
+ raise NotImplementedError()
+
+ def size(self, name):
+ """
+ Returns the total size, in bytes, of the file specified by name.
+ """
+ raise NotImplementedError()
+
+ def url(self, name):
+ """
+ Returns an absolute URL where the file's contents can be accessed
+ directly by a web browser.
+ """
+ raise NotImplementedError()
+
+class FileSystemStorage(Storage):
+ """
+ Standard filesystem storage
+ """
+
+ def __init__(self, location=settings.MEDIA_ROOT, base_url=settings.MEDIA_URL):
+ self.location = os.path.abspath(location)
+ self.base_url = base_url
+
+ def _open(self, name, mode='rb'):
+ return File(open(self.path(name), mode))
+
+ def _save(self, name, content):
+ full_path = self.path(name)
+
+ directory = os.path.dirname(full_path)
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+ elif not os.path.isdir(directory):
+ raise IOError("%s exists and is not a directory." % directory)
+
+ # There's a potential race condition between get_available_name and
+ # saving the file; it's possible that two threads might return the
+ # same name, at which point all sorts of fun happens. So we need to
+ # try to create the file, but if it already exists we have to go back
+ # to get_available_name() and try again.
+
+ while True:
+ try:
+ # This file has a file path that we can move.
+ if hasattr(content, 'temporary_file_path'):
+ file_move_safe(content.temporary_file_path(), full_path)
+ content.close()
+
+ # This is a normal uploadedfile that we can stream.
+ else:
+ # This fun binary flag incantation makes os.open throw an
+ # OSError if the file already exists before we open it.
+ fd = os.open(full_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0))
+ try:
+ locks.lock(fd, locks.LOCK_EX)
+ for chunk in content.chunks():
+ os.write(fd, chunk)
+ finally:
+ locks.unlock(fd)
+ os.close(fd)
+ except OSError:
+ # Ooops, we need a new file name.
+ name = self.get_available_name(name)
+ full_path = self.path(name)
+ else:
+ # OK, the file save worked. Break out of the loop.
+ break
+
+ return name
+
+ def delete(self, name):
+ name = self.path(name)
+ # If the file exists, delete it from the filesystem.
+ if os.path.exists(name):
+ os.remove(name)
+
+ def exists(self, name):
+ return os.path.exists(self.path(name))
+
+ def listdir(self, path):
+ path = self.path(path)
+ directories, files = [], []
+ for entry in os.listdir(path):
+ if os.path.isdir(os.path.join(path, entry)):
+ directories.append(entry)
+ else:
+ files.append(entry)
+ return directories, files
+
+ def path(self, name):
+ try:
+ path = safe_join(self.location, name)
+ except ValueError:
+ raise SuspiciousOperation("Attempted access to '%s' denied." % name)
+ return os.path.normpath(path)
+
+ def size(self, name):
+ return os.path.getsize(self.path(name))
+
+ def url(self, name):
+ if self.base_url is None:
+ raise ValueError("This file is not accessible via a URL.")
+ return urlparse.urljoin(self.base_url, name).replace('\\', '/')
+
+def get_storage_class(import_path):
+ try:
+ dot = import_path.rindex('.')
+ except ValueError:
+ raise ImproperlyConfigured("%s isn't a storage module." % import_path)
+ module, classname = import_path[:dot], import_path[dot+1:]
+ try:
+ mod = __import__(module, {}, {}, [''])
+ except ImportError, e:
+ raise ImproperlyConfigured('Error importing storage module %s: "%s"' % (module, e))
+ try:
+ return getattr(mod, classname)
+ except AttributeError:
+ raise ImproperlyConfigured('Storage module "%s" does not define a "%s" class.' % (module, classname))
+
+DefaultStorage = get_storage_class(settings.DEFAULT_FILE_STORAGE)
+default_storage = DefaultStorage()
diff --git a/webapp/django/core/files/temp.py b/webapp/django/core/files/temp.py
new file mode 100644
index 0000000000..f0d7d744ba
--- /dev/null
+++ b/webapp/django/core/files/temp.py
@@ -0,0 +1,62 @@
+"""
+The temp module provides a NamedTemporaryFile that can be re-opened on any
+platform. Most platforms use the standard Python tempfile.TemporaryFile class,
+but MS Windows users are given a custom class.
+
+This is needed because in Windows NT, the default implementation of
+NamedTemporaryFile uses the O_TEMPORARY flag, and thus cannot be reopened [1].
+
+1: http://mail.python.org/pipermail/python-list/2005-December/359474.html
+"""
+
+import os
+import tempfile
+
+__all__ = ('NamedTemporaryFile', 'gettempdir',)
+
+if os.name == 'nt':
+ class TemporaryFile(object):
+ """
+ Temporary file object constructor that works in Windows and supports
+ reopening of the temporary file in windows.
+ """
+ def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='',
+ dir=None):
+ fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
+ dir=dir)
+ self.name = name
+ self.file = os.fdopen(fd, mode, bufsize)
+ self.close_called = False
+
+ # Because close can be called during shutdown
+ # we need to cache os.unlink and access it
+ # as self.unlink only
+ unlink = os.unlink
+
+ def close(self):
+ if not self.close_called:
+ self.close_called = True
+ try:
+ self.file.close()
+ except (OSError, IOError):
+ pass
+ try:
+ self.unlink(self.name)
+ except (OSError):
+ pass
+
+ def __del__(self):
+ self.close()
+
+ def read(self, *args): return self.file.read(*args)
+ def seek(self, offset): return self.file.seek(offset)
+ def write(self, s): return self.file.write(s)
+ def __iter__(self): return iter(self.file)
+ def readlines(self, size=None): return self.file.readlines(size)
+ def xreadlines(self): return self.file.xreadlines()
+
+ NamedTemporaryFile = TemporaryFile
+else:
+ NamedTemporaryFile = tempfile.NamedTemporaryFile
+
+gettempdir = tempfile.gettempdir
diff --git a/webapp/django/core/files/uploadedfile.py b/webapp/django/core/files/uploadedfile.py
new file mode 100644
index 0000000000..afbcdba0f0
--- /dev/null
+++ b/webapp/django/core/files/uploadedfile.py
@@ -0,0 +1,156 @@
+"""
+Classes representing uploaded files.
+"""
+
+import os
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from django.conf import settings
+from django.core.files.base import File
+from django.core.files import temp as tempfile
+
+__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
+ 'SimpleUploadedFile')
+
+class UploadedFile(File):
+ """
+    An abstract uploaded file (``TemporaryUploadedFile`` and
+ ``InMemoryUploadedFile`` are the built-in concrete subclasses).
+
+ An ``UploadedFile`` object behaves somewhat like a file object and
+ represents some file data that the user submitted with a form.
+ """
+ DEFAULT_CHUNK_SIZE = 64 * 2**10
+
+ def __init__(self, name=None, content_type=None, size=None, charset=None):
+ self.name = name
+ self.size = size
+ self.content_type = content_type
+ self.charset = charset
+
+ def __repr__(self):
+ return "<%s: %s (%s)>" % (self.__class__.__name__, self.name, self.content_type)
+
+ def _get_name(self):
+ return self._name
+
+ def _set_name(self, name):
+ # Sanitize the file name so that it can't be dangerous.
+ if name is not None:
+ # Just use the basename of the file -- anything else is dangerous.
+ name = os.path.basename(name)
+
+ # File names longer than 255 characters can cause problems on older OSes.
+ if len(name) > 255:
+ name, ext = os.path.splitext(name)
+ name = name[:255 - len(ext)] + ext
+
+ self._name = name
+
+ name = property(_get_name, _set_name)
+
+ # Abstract methods; subclasses *must* define read() and probably should
+ # define open/close.
+ def read(self, num_bytes=None):
+ raise NotImplementedError()
+
+ def open(self):
+ pass
+
+ def close(self):
+ pass
+
+class TemporaryUploadedFile(UploadedFile):
+ """
+ A file uploaded to a temporary location (i.e. stream-to-disk).
+ """
+ def __init__(self, name, content_type, size, charset):
+ super(TemporaryUploadedFile, self).__init__(name, content_type, size, charset)
+ if settings.FILE_UPLOAD_TEMP_DIR:
+ self._file = tempfile.NamedTemporaryFile(suffix='.upload', dir=settings.FILE_UPLOAD_TEMP_DIR)
+ else:
+ self._file = tempfile.NamedTemporaryFile(suffix='.upload')
+
+ def temporary_file_path(self):
+ """
+ Returns the full path of this file.
+ """
+ return self._file.name
+
+ # Most methods on this object get proxied to NamedTemporaryFile.
+ # We can't directly subclass because NamedTemporaryFile is actually a
+ # factory function
+ def read(self, *args): return self._file.read(*args)
+ def seek(self, offset): return self._file.seek(offset)
+ def write(self, s): return self._file.write(s)
+ def __iter__(self): return iter(self._file)
+ def readlines(self, size=None): return self._file.readlines(size)
+ def xreadlines(self): return self._file.xreadlines()
+ def close(self):
+ try:
+ return self._file.close()
+ except OSError, e:
+ if e.errno == 2:
+ # Means the file was moved or deleted before the tempfile could unlink it.
+ # Still sets self._file.close_called and calls self._file.file.close()
+ # before the exception
+ return
+ else:
+ raise e
+
+class InMemoryUploadedFile(UploadedFile):
+ """
+ A file uploaded into memory (i.e. stream-to-memory).
+ """
+ def __init__(self, file, field_name, name, content_type, size, charset):
+ super(InMemoryUploadedFile, self).__init__(name, content_type, size, charset)
+ self.file = file
+ self.field_name = field_name
+ self.file.seek(0)
+
+ def seek(self, *args, **kwargs):
+ self.file.seek(*args, **kwargs)
+
+ def open(self):
+ self.seek(0)
+
+ def read(self, *args, **kwargs):
+ return self.file.read(*args, **kwargs)
+
+ def chunks(self, chunk_size=None):
+ self.file.seek(0)
+ yield self.read()
+
+ def multiple_chunks(self, chunk_size=None):
+ # Since it's in memory, we'll never have multiple chunks.
+ return False
+
+class SimpleUploadedFile(InMemoryUploadedFile):
+ """
+ A simple representation of a file, which just has content, size, and a name.
+ """
+ def __init__(self, name, content, content_type='text/plain'):
+ self.file = StringIO(content or '')
+ self.name = name
+ self.field_name = None
+ self.size = len(content or '')
+ self.content_type = content_type
+ self.charset = None
+ self.file.seek(0)
+
+ def from_dict(cls, file_dict):
+ """
+ Creates a SimpleUploadedFile object from
+ a dictionary object with the following keys:
+ - filename
+ - content-type
+ - content
+ """
+ return cls(file_dict['filename'],
+ file_dict['content'],
+ file_dict.get('content-type', 'text/plain'))
+
+ from_dict = classmethod(from_dict)
diff --git a/webapp/django/core/files/uploadhandler.py b/webapp/django/core/files/uploadhandler.py
new file mode 100644
index 0000000000..fa4d2df804
--- /dev/null
+++ b/webapp/django/core/files/uploadhandler.py
@@ -0,0 +1,213 @@
+"""
+Base file upload handler classes, and the built-in concrete subclasses
+"""
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile
+
+__all__ = ['UploadFileException','StopUpload', 'SkipFile', 'FileUploadHandler',
+ 'TemporaryFileUploadHandler', 'MemoryFileUploadHandler',
+ 'load_handler']
+
+class UploadFileException(Exception):
+ """
+ Any error having to do with uploading files.
+ """
+ pass
+
+class StopUpload(UploadFileException):
+ """
+ This exception is raised when an upload must abort.
+ """
+ def __init__(self, connection_reset=False):
+ """
+        If ``connection_reset`` is ``True``, Django will halt the upload
+ without consuming the rest of the upload. This will cause the browser to
+ show a "connection reset" error.
+ """
+ self.connection_reset = connection_reset
+
+ def __unicode__(self):
+ if self.connection_reset:
+ return u'StopUpload: Halt current upload.'
+ else:
+ return u'StopUpload: Consume request data, then halt.'
+
+class SkipFile(UploadFileException):
+ """
+ This exception is raised by an upload handler that wants to skip a given file.
+ """
+ pass
+
+class StopFutureHandlers(UploadFileException):
+ """
+    Upload handlers that have handled a file and do not want future handlers to
+ run should raise this exception instead of returning None.
+ """
+ pass
+
+class FileUploadHandler(object):
+ """
+ Base class for streaming upload handlers.
+ """
+ chunk_size = 64 * 2 ** 10 #: The default chunk size is 64 KB.
+
+ def __init__(self, request=None):
+ self.file_name = None
+ self.content_type = None
+ self.content_length = None
+ self.charset = None
+ self.request = request
+
+ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
+ """
+ Handle the raw input from the client.
+
+ Parameters:
+
+ :input_data:
+ An object that supports reading via .read().
+ :META:
+ ``request.META``.
+ :content_length:
+ The (integer) value of the Content-Length header from the
+ client.
+ :boundary: The boundary from the Content-Type header. Be sure to
+ prepend two '--'.
+ """
+ pass
+
+ def new_file(self, field_name, file_name, content_type, content_length, charset=None):
+ """
+ Signal that a new file has been started.
+
+ Warning: As with any data from the client, you should not trust
+ content_length (and sometimes won't even get it).
+ """
+ self.field_name = field_name
+ self.file_name = file_name
+ self.content_type = content_type
+ self.content_length = content_length
+ self.charset = charset
+
+ def receive_data_chunk(self, raw_data, start):
+ """
+ Receive data from the streamed upload parser. ``start`` is the position
+ in the file of the chunk.
+ """
+ raise NotImplementedError()
+
+ def file_complete(self, file_size):
+ """
+ Signal that a file has completed. File size corresponds to the actual
+ size accumulated by all the chunks.
+
+        Subclasses should return a valid ``UploadedFile`` object.
+ """
+ raise NotImplementedError()
+
+ def upload_complete(self):
+ """
+ Signal that the upload is complete. Subclasses should perform cleanup
+ that is necessary for this handler.
+ """
+ pass
+
+class TemporaryFileUploadHandler(FileUploadHandler):
+ """
+ Upload handler that streams data into a temporary file.
+ """
+ def __init__(self, *args, **kwargs):
+ super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
+
+ def new_file(self, file_name, *args, **kwargs):
+ """
+ Create the file object to append to as data is coming in.
+ """
+ super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
+ self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset)
+
+ def receive_data_chunk(self, raw_data, start):
+ self.file.write(raw_data)
+
+ def file_complete(self, file_size):
+ self.file.seek(0)
+ self.file.size = file_size
+ return self.file
+
+class MemoryFileUploadHandler(FileUploadHandler):
+ """
+ File upload handler to stream uploads into memory (used for small files).
+ """
+
+ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
+ """
+ Use the content_length to signal whether or not this handler should be in use.
+ """
+        # Check the content-length header to decide whether to use this handler.
+        # If the post is too large, we cannot use the Memory handler.
+ if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
+ self.activated = False
+ else:
+ self.activated = True
+
+ def new_file(self, *args, **kwargs):
+ super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
+ if self.activated:
+ self.file = StringIO()
+ raise StopFutureHandlers()
+
+ def receive_data_chunk(self, raw_data, start):
+ """
+ Add the data to the StringIO file.
+ """
+ if self.activated:
+ self.file.write(raw_data)
+ else:
+ return raw_data
+
+ def file_complete(self, file_size):
+ """
+ Return a file object if we're activated.
+ """
+ if not self.activated:
+ return
+
+ return InMemoryUploadedFile(
+ file = self.file,
+ field_name = self.field_name,
+ name = self.file_name,
+ content_type = self.content_type,
+ size = file_size,
+ charset = self.charset
+ )
+
+
+def load_handler(path, *args, **kwargs):
+ """
+ Given a path to a handler, return an instance of that handler.
+
+ E.g.::
+ >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
+ <TemporaryFileUploadHandler object at 0x...>
+
+ """
+ i = path.rfind('.')
+ module, attr = path[:i], path[i+1:]
+ try:
+ mod = __import__(module, {}, {}, [attr])
+ except ImportError, e:
+ raise ImproperlyConfigured('Error importing upload handler module %s: "%s"' % (module, e))
+ except ValueError, e:
+ raise ImproperlyConfigured('Error importing upload handler module. Is FILE_UPLOAD_HANDLERS a correctly defined list or tuple?')
+ try:
+ cls = getattr(mod, attr)
+ except AttributeError:
+ raise ImproperlyConfigured('Module "%s" does not define a "%s" upload handler backend' % (module, attr))
+ return cls(*args, **kwargs)
diff --git a/webapp/django/core/handlers/__init__.py b/webapp/django/core/handlers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/webapp/django/core/handlers/__init__.py
diff --git a/webapp/django/core/handlers/base.py b/webapp/django/core/handlers/base.py
new file mode 100644
index 0000000000..b34d0e096e
--- /dev/null
+++ b/webapp/django/core/handlers/base.py
@@ -0,0 +1,200 @@
+import sys
+
+from django import http
+from django.core import signals
+from django.utils.encoding import force_unicode
+
+class BaseHandler(object):
+ # Changes that are always applied to a response (in this order).
+ response_fixes = [
+ http.fix_location_header,
+ http.conditional_content_removal,
+ http.fix_IE_for_attach,
+ http.fix_IE_for_vary,
+ ]
+
+ def __init__(self):
+ self._request_middleware = self._view_middleware = self._response_middleware = self._exception_middleware = None
+
+ def load_middleware(self):
+ """
+ Populate middleware lists from settings.MIDDLEWARE_CLASSES.
+
+ Must be called after the environment is fixed (see __call__).
+ """
+ from django.conf import settings
+ from django.core import exceptions
+ self._request_middleware = []
+ self._view_middleware = []
+ self._response_middleware = []
+ self._exception_middleware = []
+ for middleware_path in settings.MIDDLEWARE_CLASSES:
+ try:
+ dot = middleware_path.rindex('.')
+ except ValueError:
+ raise exceptions.ImproperlyConfigured, '%s isn\'t a middleware module' % middleware_path
+ mw_module, mw_classname = middleware_path[:dot], middleware_path[dot+1:]
+ try:
+ mod = __import__(mw_module, {}, {}, [''])
+ except ImportError, e:
+ raise exceptions.ImproperlyConfigured, 'Error importing middleware %s: "%s"' % (mw_module, e)
+ try:
+ mw_class = getattr(mod, mw_classname)
+ except AttributeError:
+ raise exceptions.ImproperlyConfigured, 'Middleware module "%s" does not define a "%s" class' % (mw_module, mw_classname)
+
+ try:
+ mw_instance = mw_class()
+ except exceptions.MiddlewareNotUsed:
+ continue
+
+ if hasattr(mw_instance, 'process_request'):
+ self._request_middleware.append(mw_instance.process_request)
+ if hasattr(mw_instance, 'process_view'):
+ self._view_middleware.append(mw_instance.process_view)
+ if hasattr(mw_instance, 'process_response'):
+ self._response_middleware.insert(0, mw_instance.process_response)
+ if hasattr(mw_instance, 'process_exception'):
+ self._exception_middleware.insert(0, mw_instance.process_exception)
+
+ def get_response(self, request):
+ "Returns an HttpResponse object for the given HttpRequest"
+ from django.core import exceptions, urlresolvers
+ from django.conf import settings
+
+ # Apply request middleware
+ for middleware_method in self._request_middleware:
+ response = middleware_method(request)
+ if response:
+ return response
+
+ # Get urlconf from request object, if available. Otherwise use default.
+ urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
+
+ resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
+ try:
+ callback, callback_args, callback_kwargs = resolver.resolve(
+ request.path_info)
+
+ # Apply view middleware
+ for middleware_method in self._view_middleware:
+ response = middleware_method(request, callback, callback_args, callback_kwargs)
+ if response:
+ return response
+
+ try:
+ response = callback(request, *callback_args, **callback_kwargs)
+ except Exception, e:
+ # If the view raised an exception, run it through exception
+ # middleware, and if the exception middleware returns a
+ # response, use that. Otherwise, reraise the exception.
+ for middleware_method in self._exception_middleware:
+ response = middleware_method(request, e)
+ if response:
+ return response
+ raise
+
+ # Complain if the view returned None (a common error).
+ if response is None:
+ try:
+ view_name = callback.func_name # If it's a function
+ except AttributeError:
+ view_name = callback.__class__.__name__ + '.__call__' # If it's a class
+ raise ValueError, "The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name)
+
+ return response
+ except http.Http404, e:
+ if settings.DEBUG:
+ from django.views import debug
+ return debug.technical_404_response(request, e)
+ else:
+ try:
+ callback, param_dict = resolver.resolve404()
+ return callback(request, **param_dict)
+ except:
+ try:
+ return self.handle_uncaught_exception(request, resolver, sys.exc_info())
+ finally:
+ receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
+ except exceptions.PermissionDenied:
+ return http.HttpResponseForbidden('<h1>Permission denied</h1>')
+ except SystemExit:
+ # Allow sys.exit() to actually exit. See tickets #1023 and #4701
+ raise
+ except: # Handle everything else, including SuspiciousOperation, etc.
+ # Get the exception info now, in case another exception is thrown later.
+ exc_info = sys.exc_info()
+ receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
+ return self.handle_uncaught_exception(request, resolver, exc_info)
+
+ def handle_uncaught_exception(self, request, resolver, exc_info):
+ """
+ Processing for any otherwise uncaught exceptions (those that will
+ generate HTTP 500 responses). Can be overridden by subclasses who want
+ customised 500 handling.
+
+ Be *very* careful when overriding this because the error could be
+ caused by anything, so assuming something like the database is always
+ available would be an error.
+ """
+ from django.conf import settings
+ from django.core.mail import mail_admins
+
+ if settings.DEBUG_PROPAGATE_EXCEPTIONS:
+ raise
+
+ if settings.DEBUG:
+ from django.views import debug
+ return debug.technical_500_response(request, *exc_info)
+
+ # When DEBUG is False, send an error message to the admins.
+ subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
+ try:
+ request_repr = repr(request)
+ except:
+ request_repr = "Request repr() unavailable"
+ message = "%s\n\n%s" % (self._get_traceback(exc_info), request_repr)
+ mail_admins(subject, message, fail_silently=True)
+ # Return an HttpResponse that displays a friendly error message.
+ callback, param_dict = resolver.resolve500()
+ return callback(request, **param_dict)
+
+ def _get_traceback(self, exc_info=None):
+ "Helper function to return the traceback as a string"
+ import traceback
+ return '\n'.join(traceback.format_exception(*(exc_info or sys.exc_info())))
+
+ def apply_response_fixes(self, request, response):
+ """
+ Applies each of the functions in self.response_fixes to the request and
+ response, modifying the response in the process. Returns the new
+ response.
+ """
+ for func in self.response_fixes:
+ response = func(request, response)
+ return response
+
+def get_script_name(environ):
+ """
+ Returns the equivalent of the HTTP request's SCRIPT_NAME environment
+ variable. If Apache mod_rewrite has been used, returns what would have been
+ the script name prior to any rewriting (so it's the script name as seen
+ from the client's perspective), unless DJANGO_USE_POST_REWRITE is set (to
+ anything).
+ """
+ from django.conf import settings
+ if settings.FORCE_SCRIPT_NAME is not None:
+ return force_unicode(settings.FORCE_SCRIPT_NAME)
+
+ # If Apache's mod_rewrite had a whack at the URL, Apache set either
+ # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
+ # rewrites. Unfortunately not every webserver (lighttpd!) passes this
+ # information through all the time, so FORCE_SCRIPT_NAME, above, is still
+ # needed.
+ script_url = environ.get('SCRIPT_URL', u'')
+ if not script_url:
+ script_url = environ.get('REDIRECT_URL', u'')
+ if script_url:
+ return force_unicode(script_url[:-len(environ.get('PATH_INFO', ''))])
+ return force_unicode(environ.get('SCRIPT_NAME', u''))
+
diff --git a/webapp/django/core/handlers/modpython.py b/webapp/django/core/handlers/modpython.py
new file mode 100644
index 0000000000..aa3fb23e39
--- /dev/null
+++ b/webapp/django/core/handlers/modpython.py
@@ -0,0 +1,210 @@
+import os
+from pprint import pformat
+
+from django import http
+from django.core import signals
+from django.core.handlers.base import BaseHandler
+from django.core.urlresolvers import set_script_prefix
+from django.utils import datastructures
+from django.utils.encoding import force_unicode, smart_str
+
+# NOTE: do *not* import settings (or any module which eventually imports
+# settings) until after ModPythonHandler has been called; otherwise os.environ
+# won't be set up correctly (with respect to settings).
+
class ModPythonRequest(http.HttpRequest):
    """
    HttpRequest implementation backed by a mod_python request object.

    GET, POST, COOKIES, FILES, META, REQUEST and raw_post_data are all parsed
    lazily from the underlying mod_python request (self._req) on first access
    and cached on the instance.
    """

    def __init__(self, req):
        self._req = req
        # FIXME: This isn't ideal. The request URI may be encoded (it's
        # non-normalized) slightly differently to the "real" SCRIPT_NAME
        # and PATH_INFO values. This causes problems when we compute path_info,
        # below. For now, don't use script names that will be subject to
        # encoding/decoding.
        self.path = force_unicode(req.uri)
        root = req.get_options().get('django.root', '')
        self.django_root = root
        # req.path_info isn't necessarily computed correctly in all
        # circumstances (it's out of mod_python's control a bit), so we use
        # req.uri and some string manipulations to get the right value.
        if root and req.uri.startswith(root):
            self.path_info = force_unicode(req.uri[len(root):])
        else:
            self.path_info = self.path
        if not self.path_info:
            # Django prefers empty paths to be '/', rather than '', to give us
            # a common start character for URL patterns. So this is a little
            # naughty, but also pretty harmless.
            self.path_info = u'/'

    def __repr__(self):
        # Since this is called as part of error handling, we need to be very
        # robust against potentially malformed input. The bare excepts are
        # deliberate: __repr__ must never raise, whatever state the lazy
        # accessors are in.
        try:
            get = pformat(self.GET)
        except:
            get = '<could not parse>'
        try:
            post = pformat(self.POST)
        except:
            post = '<could not parse>'
        try:
            cookies = pformat(self.COOKIES)
        except:
            cookies = '<could not parse>'
        try:
            meta = pformat(self.META)
        except:
            meta = '<could not parse>'
        return smart_str(u'<ModPythonRequest\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' %
                         (self.path, unicode(get), unicode(post),
                          unicode(cookies), unicode(meta)))

    def get_full_path(self):
        # Reattach the query string (req.args), if there is one.
        return '%s%s' % (self.path, self._req.args and ('?' + self._req.args) or '')

    def is_secure(self):
        try:
            return self._req.is_https()
        except AttributeError:
            # mod_python < 3.2.10 doesn't have req.is_https().
            return self._req.subprocess_env.get('HTTPS', '').lower() in ('on', '1')

    def _load_post_and_files(self):
        "Populates self._post and self._files"
        if 'content-type' in self._req.headers_in and self._req.headers_in['content-type'].startswith('multipart'):
            # Multipart bodies go through the file-upload machinery;
            # raw_post_data is deliberately left empty in that case.
            self._raw_post_data = ''
            self._post, self._files = self.parse_file_upload(self.META, self._req)
        else:
            self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()

    # Each _get_* accessor below parses its data lazily on first use and
    # caches the result on the instance (hence the hasattr checks).

    def _get_request(self):
        if not hasattr(self, '_request'):
            self._request = datastructures.MergeDict(self.POST, self.GET)
        return self._request

    def _get_get(self):
        if not hasattr(self, '_get'):
            self._get = http.QueryDict(self._req.args, encoding=self._encoding)
        return self._get

    def _set_get(self, get):
        self._get = get

    def _get_post(self):
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post

    def _set_post(self, post):
        self._post = post

    def _get_cookies(self):
        if not hasattr(self, '_cookies'):
            self._cookies = http.parse_cookie(self._req.headers_in.get('cookie', ''))
        return self._cookies

    def _set_cookies(self, cookies):
        self._cookies = cookies

    def _get_files(self):
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files

    def _get_meta(self):
        "Lazy loader that returns self.META dictionary"
        if not hasattr(self, '_meta'):
            self._meta = {
                'AUTH_TYPE': self._req.ap_auth_type,
                'CONTENT_LENGTH': self._req.clength, # This may be wrong
                'CONTENT_TYPE': self._req.content_type, # This may be wrong
                'GATEWAY_INTERFACE': 'CGI/1.1',
                'PATH_INFO': self.path_info,
                'PATH_TRANSLATED': None, # Not supported
                'QUERY_STRING': self._req.args,
                'REMOTE_ADDR': self._req.connection.remote_ip,
                'REMOTE_HOST': None, # DNS lookups not supported
                'REMOTE_IDENT': self._req.connection.remote_logname,
                'REMOTE_USER': self._req.user,
                'REQUEST_METHOD': self._req.method,
                'SCRIPT_NAME': self.django_root,
                'SERVER_NAME': self._req.server.server_hostname,
                'SERVER_PORT': self._req.server.port,
                'SERVER_PROTOCOL': self._req.protocol,
                'SERVER_SOFTWARE': 'mod_python'
            }
            # Expose the incoming HTTP headers CGI-style: HTTP_ prefix,
            # upper-cased, dashes replaced by underscores.
            for key, value in self._req.headers_in.items():
                key = 'HTTP_' + key.upper().replace('-', '_')
                self._meta[key] = value
        return self._meta

    def _get_raw_post_data(self):
        try:
            return self._raw_post_data
        except AttributeError:
            # Read the whole request body once and cache it.
            self._raw_post_data = self._req.read()
            return self._raw_post_data

    def _get_method(self):
        return self.META['REQUEST_METHOD'].upper()

    GET = property(_get_get, _set_get)
    POST = property(_get_post, _set_post)
    COOKIES = property(_get_cookies, _set_cookies)
    FILES = property(_get_files)
    META = property(_get_meta)
    REQUEST = property(_get_request)
    raw_post_data = property(_get_raw_post_data)
    method = property(_get_method)
+
class ModPythonHandler(BaseHandler):
    """BaseHandler subclass that adapts Django to mod_python's request API."""

    # The request wrapper class instantiated for every incoming request.
    request_class = ModPythonRequest

    def __call__(self, req):
        # mod_python fakes the environ, and thus doesn't process SetEnv. This fixes that
        os.environ.update(req.subprocess_env)

        # now that the environ works we can see the correct settings, so imports
        # that use settings now can work
        from django.conf import settings

        # if we need to set up middleware, now that settings works we can do it now.
        if self._request_middleware is None:
            self.load_middleware()

        set_script_prefix(req.get_options().get('django.root', ''))
        signals.request_started.send(sender=self.__class__)
        try:
            try:
                request = self.request_class(req)
            except UnicodeDecodeError:
                # A malformed (badly encoded) request URI is a client error.
                response = http.HttpResponseBadRequest()
            else:
                response = self.get_response(request)

            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
            response = self.apply_response_fixes(request, response)
        finally:
            # Always fire request_finished, even if a middleware blew up.
            signals.request_finished.send(sender=self.__class__)

        # Convert our custom HttpResponse object back into the mod_python req.
        req.content_type = response['Content-Type']
        for key, value in response.items():
            if key != 'content-type':
                # Content-Type was already set via req.content_type above.
                req.headers_out[str(key)] = str(value)
        for c in response.cookies.values():
            req.headers_out.add('Set-Cookie', c.output(header=''))
        req.status = response.status_code
        try:
            for chunk in response:
                req.write(chunk)
        finally:
            # Make sure the response releases its resources even if a write
            # to the client fails mid-stream.
            response.close()

        return 0 # mod_python.apache.OK
+
def handler(req):
    # mod_python is configured to call this function for every request.
    mp_handler = ModPythonHandler()
    return mp_handler(req)
diff --git a/webapp/django/core/handlers/profiler-hotshot.py b/webapp/django/core/handlers/profiler-hotshot.py
new file mode 100644
index 0000000000..6cf94b0c00
--- /dev/null
+++ b/webapp/django/core/handlers/profiler-hotshot.py
@@ -0,0 +1,22 @@
+import hotshot, time, os
+from django.core.handlers.modpython import ModPythonHandler
+
PROFILE_DATA_DIR = "/var/log/cmsprofile"

def handler(req):
    '''
    Handler that uses hotshot to store profile data.

    Stores profile data in PROFILE_DATA_DIR. Since hotshot has no way (that I
    know of) to append profile data to a single file, each request gets its
    own profile. The file names are in the format <url>.<n>.prof where <url>
    is the request path with "/" replaced by ".", and <n> is time.time() with
    millisecond precision, which keeps concurrent requests from overwriting
    each other's files.

    Use the gather_profile_stats.py script to gather these individual request
    profiles into aggregated profiles by request path.
    '''
    dotted_path = req.uri.strip("/").replace('/', '.')
    filename = "%s.%.3f.prof" % (dotted_path, time.time())
    profile_path = os.path.join(PROFILE_DATA_DIR, filename)
    profiler = hotshot.Profile(profile_path)
    return profiler.runcall(ModPythonHandler(), req)
diff --git a/webapp/django/core/handlers/wsgi.py b/webapp/django/core/handlers/wsgi.py
new file mode 100644
index 0000000000..d1336b33be
--- /dev/null
+++ b/webapp/django/core/handlers/wsgi.py
@@ -0,0 +1,238 @@
+from threading import Lock
+from pprint import pformat
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from django import http
+from django.core import signals
+from django.core.handlers import base
+from django.core.urlresolvers import set_script_prefix
+from django.utils import datastructures
+from django.utils.encoding import force_unicode
+
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
# Maps an integer HTTP status code to the RFC 2616 reason phrase used when
# building the WSGI status line (e.g. "404 NOT FOUND"). Codes missing from
# this table fall back to 'UNKNOWN STATUS CODE' in WSGIHandler.__call__.
STATUS_CODE_TEXT = {
    100: 'CONTINUE',
    101: 'SWITCHING PROTOCOLS',
    200: 'OK',
    201: 'CREATED',
    202: 'ACCEPTED',
    203: 'NON-AUTHORITATIVE INFORMATION',
    204: 'NO CONTENT',
    205: 'RESET CONTENT',
    206: 'PARTIAL CONTENT',
    300: 'MULTIPLE CHOICES',
    301: 'MOVED PERMANENTLY',
    302: 'FOUND',
    303: 'SEE OTHER',
    304: 'NOT MODIFIED',
    305: 'USE PROXY',
    306: 'RESERVED',
    307: 'TEMPORARY REDIRECT',
    400: 'BAD REQUEST',
    401: 'UNAUTHORIZED',
    402: 'PAYMENT REQUIRED',
    403: 'FORBIDDEN',
    404: 'NOT FOUND',
    405: 'METHOD NOT ALLOWED',
    406: 'NOT ACCEPTABLE',
    407: 'PROXY AUTHENTICATION REQUIRED',
    408: 'REQUEST TIMEOUT',
    409: 'CONFLICT',
    410: 'GONE',
    411: 'LENGTH REQUIRED',
    412: 'PRECONDITION FAILED',
    413: 'REQUEST ENTITY TOO LARGE',
    414: 'REQUEST-URI TOO LONG',
    415: 'UNSUPPORTED MEDIA TYPE',
    416: 'REQUESTED RANGE NOT SATISFIABLE',
    417: 'EXPECTATION FAILED',
    500: 'INTERNAL SERVER ERROR',
    501: 'NOT IMPLEMENTED',
    502: 'BAD GATEWAY',
    503: 'SERVICE UNAVAILABLE',
    504: 'GATEWAY TIMEOUT',
    505: 'HTTP VERSION NOT SUPPORTED',
}
+
def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0):
    """
    A bounded version of shutil.copyfileobj: copies at most `size` bytes from
    fsrc to fdst, in chunks of at most `length` bytes. This makes it safe
    against clients sending more than CONTENT_LENGTH bytes of body data.
    A falsy `size` means "copy nothing".
    """
    if not size:
        return
    remaining = size
    while remaining > 0:
        chunk = fsrc.read(min(length, remaining))
        if not chunk:
            # Source exhausted before the budget was used up.
            break
        fdst.write(chunk)
        remaining -= len(chunk)
+
class WSGIRequest(http.HttpRequest):
    """
    HttpRequest implementation that reads everything from a WSGI environ.

    GET, POST, COOKIES, FILES, REQUEST and raw_post_data are parsed lazily on
    first access and cached on the instance.
    """

    def __init__(self, environ):
        script_name = base.get_script_name(environ)
        path_info = force_unicode(environ.get('PATH_INFO', u'/'))
        if not path_info:
            # Sometimes PATH_INFO exists, but is empty (e.g. accessing
            # the SCRIPT_NAME URL without a trailing slash). We really need to
            # operate as if they'd requested '/'. Not amazingly nice to force
            # the path like this, but should be harmless.
            path_info = u'/'
        self.environ = environ
        self.path_info = path_info
        self.path = '%s%s' % (script_name, path_info)
        # The WSGI environ doubles as the META dict (with the normalized
        # PATH_INFO/SCRIPT_NAME written back into it).
        self.META = environ
        self.META['PATH_INFO'] = path_info
        self.META['SCRIPT_NAME'] = script_name
        self.method = environ['REQUEST_METHOD'].upper()

    def __repr__(self):
        # Since this is called as part of error handling, we need to be very
        # robust against potentially malformed input. The bare excepts are
        # deliberate: __repr__ must never raise.
        try:
            get = pformat(self.GET)
        except:
            get = '<could not parse>'
        try:
            post = pformat(self.POST)
        except:
            post = '<could not parse>'
        try:
            cookies = pformat(self.COOKIES)
        except:
            cookies = '<could not parse>'
        try:
            meta = pformat(self.META)
        except:
            meta = '<could not parse>'
        return '<WSGIRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
            (get, post, cookies, meta)

    def get_full_path(self):
        # Reattach the query string, if there is one.
        return '%s%s' % (self.path, self.environ.get('QUERY_STRING', '') and ('?' + self.environ.get('QUERY_STRING', '')) or '')

    def is_secure(self):
        return 'wsgi.url_scheme' in self.environ \
            and self.environ['wsgi.url_scheme'] == 'https'

    def _load_post_and_files(self):
        # Populates self._post and self._files
        if self.method == 'POST':
            if self.environ.get('CONTENT_TYPE', '').startswith('multipart'):
                # Multipart bodies go through the file-upload machinery;
                # raw_post_data is deliberately left empty in that case.
                self._raw_post_data = ''
                self._post, self._files = self.parse_file_upload(self.META, self.environ['wsgi.input'])
            else:
                self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()
        else:
            # Non-POST requests carry no form payload.
            self._post, self._files = http.QueryDict('', encoding=self._encoding), datastructures.MultiValueDict()

    # Each _get_* accessor below parses its data lazily on first use and
    # caches the result on the instance (hence the hasattr checks).

    def _get_request(self):
        if not hasattr(self, '_request'):
            self._request = datastructures.MergeDict(self.POST, self.GET)
        return self._request

    def _get_get(self):
        if not hasattr(self, '_get'):
            # The WSGI spec says 'QUERY_STRING' may be absent.
            self._get = http.QueryDict(self.environ.get('QUERY_STRING', ''), encoding=self._encoding)
        return self._get

    def _set_get(self, get):
        self._get = get

    def _get_post(self):
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post

    def _set_post(self, post):
        self._post = post

    def _get_cookies(self):
        if not hasattr(self, '_cookies'):
            self._cookies = http.parse_cookie(self.environ.get('HTTP_COOKIE', ''))
        return self._cookies

    def _set_cookies(self, cookies):
        self._cookies = cookies

    def _get_files(self):
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files

    def _get_raw_post_data(self):
        try:
            return self._raw_post_data
        except AttributeError:
            buf = StringIO()
            try:
                # CONTENT_LENGTH might be absent if POST doesn't have content at all (lighttpd)
                content_length = int(self.environ.get('CONTENT_LENGTH', 0))
            except (ValueError, TypeError):
                # If CONTENT_LENGTH was empty string or not an integer, don't
                # error out. We've also seen None passed in here (against all
                # specs, but see ticket #8259), so we handle TypeError as well.
                content_length = 0
            if content_length > 0:
                # Never read more than the declared length (see
                # safe_copyfileobj above).
                safe_copyfileobj(self.environ['wsgi.input'], buf,
                        size=content_length)
            self._raw_post_data = buf.getvalue()
            buf.close()
            return self._raw_post_data

    GET = property(_get_get, _set_get)
    POST = property(_get_post, _set_post)
    COOKIES = property(_get_cookies, _set_cookies)
    FILES = property(_get_files)
    REQUEST = property(_get_request)
    raw_post_data = property(_get_raw_post_data)
+
class WSGIHandler(base.BaseHandler):
    """WSGI application object: adapts Django request handling to PEP 333."""

    # Guards one-time middleware initialisation across threads.
    initLock = Lock()
    request_class = WSGIRequest

    def __call__(self, environ, start_response):
        from django.conf import settings

        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.initLock.acquire()
            try:
                # Check that middleware is still uninitialised (another
                # thread may have done the work while we waited for the lock).
                if self._request_middleware is None:
                    self.load_middleware()
            finally:
                # Bug fix: the release must happen even if load_middleware()
                # raises; otherwise the lock stays held forever and every
                # subsequent request deadlocks here.
                self.initLock.release()

        set_script_prefix(base.get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            try:
                request = self.request_class(environ)
            except UnicodeDecodeError:
                # A malformed (badly encoded) request URI is a client error.
                response = http.HttpResponseBadRequest()
            else:
                response = self.get_response(request)

            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
            response = self.apply_response_fixes(request, response)
        finally:
            # Always fire request_finished, even if a middleware blew up.
            signals.request_finished.send(sender=self.__class__)

        try:
            status_text = STATUS_CODE_TEXT[response.status_code]
        except KeyError:
            status_text = 'UNKNOWN STATUS CODE'
        status = '%s %s' % (response.status_code, status_text)
        response_headers = [(str(k), str(v)) for k, v in response.items()]
        for c in response.cookies.values():
            response_headers.append(('Set-Cookie', str(c.output(header=''))))
        start_response(status, response_headers)
        return response
+
diff --git a/webapp/django/core/mail.py b/webapp/django/core/mail.py
new file mode 100644
index 0000000000..1ac2a39908
--- /dev/null
+++ b/webapp/django/core/mail.py
@@ -0,0 +1,371 @@
+"""
+Tools for sending email.
+"""
+
+import mimetypes
+import os
+import smtplib
+import socket
+import time
+import random
+from email import Charset, Encoders
+from email.MIMEText import MIMEText
+from email.MIMEMultipart import MIMEMultipart
+from email.MIMEBase import MIMEBase
+from email.Header import Header
+from email.Utils import formatdate, parseaddr, formataddr
+
+from django.conf import settings
+from django.utils.encoding import smart_str, force_unicode
+
+# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
+# some spam filters.
+Charset.add_charset('utf-8', Charset.SHORTEST, Charset.QP, 'utf-8')
+
+# Default MIME type to use on attachments (if it is not explicitly given
+# and cannot be guessed).
+DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
+
+# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
+# seconds, which slows down the restart of the server.
class CachedDnsName(object):
    """
    Lazily resolves and caches the local fully-qualified domain name.

    socket.getfqdn() can take a couple of seconds, so the lookup is deferred
    until first use and the result is memoized on the instance.
    """

    def __str__(self):
        return self.get_fqdn()

    def get_fqdn(self):
        try:
            return self._fqdn
        except AttributeError:
            self._fqdn = socket.getfqdn()
            return self._fqdn
+
+DNS_NAME = CachedDnsName()
+
+# Copied from Python standard library, with the following modifications:
+# * Used cached hostname for performance.
+# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
    """Returns a string suitable for RFC 2822 compliant Message-ID, e.g:

    <20020201195627.33539.96671@nightshade.la.mastaler.com>

    Optional idstring if given is a string used to strengthen the
    uniqueness of the message id.
    """
    utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(time.time()))
    try:
        pid = os.getpid()
    except AttributeError:
        # No getpid() in Jython, for example.
        pid = 1
    randint = random.randrange(100000)
    if idstring is None:
        suffix = ''
    else:
        suffix = '.' + idstring
    # DNS_NAME resolves lazily on first use (see CachedDnsName).
    return '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, suffix, DNS_NAME)
+
class BadHeaderError(ValueError):
    """Raised when a mail header value contains a forbidden newline."""
+
def forbid_multi_line_headers(name, val):
    """Forbids multi-line headers, to prevent header injection."""
    val = force_unicode(val)
    if '\n' in val or '\r' in val:
        raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
    try:
        # Pure-ASCII values pass through unchanged (as a byte string).
        val = val.encode('ascii')
    except UnicodeEncodeError:
        if name.lower() in ('to', 'from', 'cc'):
            # Address headers: only the display name may be MIME-encoded;
            # the address itself must stay plain, so encode per address.
            result = []
            for item in val.split(', '):
                nm, addr = parseaddr(item)
                nm = str(Header(nm, settings.DEFAULT_CHARSET))
                result.append(formataddr((nm, str(addr))))
            val = ', '.join(result)
        else:
            # Any other non-ASCII header is MIME-encoded wholesale.
            val = Header(val, settings.DEFAULT_CHARSET)
    else:
        if name.lower() == 'subject':
            val = Header(val)
    return name, val
+
class SafeMIMEText(MIMEText):
    """MIMEText that guards every header assignment against header injection
    (see forbid_multi_line_headers)."""
    def __setitem__(self, name, val):
        name, val = forbid_multi_line_headers(name, val)
        MIMEText.__setitem__(self, name, val)
+
class SafeMIMEMultipart(MIMEMultipart):
    """MIMEMultipart that guards every header assignment against header
    injection (see forbid_multi_line_headers)."""
    def __setitem__(self, name, val):
        name, val = forbid_multi_line_headers(name, val)
        MIMEMultipart.__setitem__(self, name, val)
+
class SMTPConnection(object):
    """
    A wrapper that manages the SMTP network connection.

    Connection parameters default to the corresponding EMAIL_* settings when
    not given explicitly. When fail_silently is True, connection and send
    errors are swallowed instead of propagating to the caller.
    """

    def __init__(self, host=None, port=None, username=None, password=None,
                 use_tls=None, fail_silently=False):
        self.host = host or settings.EMAIL_HOST
        self.port = port or settings.EMAIL_PORT
        self.username = username or settings.EMAIL_HOST_USER
        self.password = password or settings.EMAIL_HOST_PASSWORD
        # Bug fix: the previous "(use_tls is not None) and use_tls or
        # settings.EMAIL_USE_TLS" expression fell back to the setting whenever
        # use_tls=False was passed, making it impossible to disable TLS
        # explicitly. Only None means "use the setting".
        if use_tls is None:
            self.use_tls = settings.EMAIL_USE_TLS
        else:
            self.use_tls = use_tls
        self.fail_silently = fail_silently
        self.connection = None

    def open(self):
        """
        Ensures we have a connection to the email server. Returns whether or
        not a new connection was required (True or False).
        """
        if self.connection:
            # Nothing to do if the connection is already open.
            return False
        try:
            # If local_hostname is not specified, socket.getfqdn() gets used.
            # For performance, we use the cached FQDN for local_hostname.
            self.connection = smtplib.SMTP(self.host, self.port,
                                           local_hostname=DNS_NAME.get_fqdn())
            if self.use_tls:
                self.connection.ehlo()
                self.connection.starttls()
                self.connection.ehlo()
            if self.username and self.password:
                self.connection.login(self.username, self.password)
            return True
        except:
            # Deliberately broad: any connect/TLS/auth failure is suppressed
            # when fail_silently is set.
            if not self.fail_silently:
                raise

    def close(self):
        """Closes the connection to the email server."""
        try:
            try:
                self.connection.quit()
            except socket.sslerror:
                # This happens when calling quit() on a TLS connection
                # sometimes.
                self.connection.close()
            except:
                if self.fail_silently:
                    return
                raise
        finally:
            # Whatever happened, drop the old connection so open() can
            # create a fresh one later.
            self.connection = None

    def send_messages(self, email_messages):
        """
        Sends one or more EmailMessage objects and returns the number of email
        messages sent.
        """
        if not email_messages:
            return
        new_conn_created = self.open()
        if not self.connection:
            # We failed silently on open(). Trying to send would be pointless.
            return
        num_sent = 0
        for message in email_messages:
            sent = self._send(message)
            if sent:
                num_sent += 1
        if new_conn_created:
            # Only close connections we opened ourselves.
            self.close()
        return num_sent

    def _send(self, email_message):
        """A helper method that does the actual sending."""
        if not email_message.recipients():
            return False
        try:
            self.connection.sendmail(email_message.from_email,
                    email_message.recipients(),
                    email_message.message().as_string())
        except:
            # Suppress any SMTP error when failing silently.
            if not self.fail_silently:
                raise
            return False
        return True
+
class EmailMessage(object):
    """
    A container for email information.
    """
    content_subtype = 'plain'
    multipart_subtype = 'mixed'
    encoding = None     # None => use settings default

    def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
                 connection=None, attachments=None, headers=None):
        """
        Initialize a single email message (which can be sent to multiple
        recipients).

        All strings used to create the message can be unicode strings (or UTF-8
        bytestrings). The SafeMIMEText class will handle any necessary encoding
        conversions.
        """
        if to:
            assert not isinstance(to, basestring), '"to" argument must be a list or tuple'
            self.to = list(to)
        else:
            self.to = []
        if bcc:
            assert not isinstance(bcc, basestring), '"bcc" argument must be a list or tuple'
            self.bcc = list(bcc)
        else:
            self.bcc = []
        self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
        self.subject = subject
        self.body = body
        self.attachments = attachments or []
        self.extra_headers = headers or {}
        self.connection = connection

    def get_connection(self, fail_silently=False):
        """Return the cached SMTPConnection, creating one if necessary."""
        if not self.connection:
            self.connection = SMTPConnection(fail_silently=fail_silently)
        return self.connection

    def message(self):
        """Build and return the underlying MIME message object."""
        encoding = self.encoding or settings.DEFAULT_CHARSET
        msg = SafeMIMEText(smart_str(self.body, settings.DEFAULT_CHARSET),
                           self.content_subtype, encoding)
        if self.attachments:
            # Promote to multipart, with the body (if any) as the first part.
            body_msg = msg
            msg = SafeMIMEMultipart(_subtype=self.multipart_subtype)
            if self.body:
                msg.attach(body_msg)
            for attachment in self.attachments:
                if isinstance(attachment, MIMEBase):
                    msg.attach(attachment)
                else:
                    msg.attach(self._create_attachment(*attachment))
        msg['Subject'] = self.subject
        msg['From'] = self.from_email
        msg['To'] = ', '.join(self.to)
        msg['Date'] = formatdate()
        msg['Message-ID'] = make_msgid()
        for name, value in self.extra_headers.items():
            msg[name] = value
        return msg

    def recipients(self):
        """
        Returns a list of all recipients of the email (includes direct
        addressees as well as Bcc entries).
        """
        return self.to + self.bcc

    def send(self, fail_silently=False):
        """Sends the email message."""
        return self.get_connection(fail_silently).send_messages([self])

    def attach(self, filename=None, content=None, mimetype=None):
        """
        Attaches a file with the given filename and content. The filename can
        be omitted (useful for multipart/alternative messages) and the mimetype
        is guessed, if not provided.

        If the first parameter is a MIMEBase subclass it is inserted directly
        into the resulting message attachments.
        """
        if isinstance(filename, MIMEBase):
            assert content is None and mimetype is None
            self.attachments.append(filename)
        else:
            assert content is not None
            self.attachments.append((filename, content, mimetype))

    def attach_file(self, path, mimetype=None):
        """Attaches a file from the filesystem."""
        filename = os.path.basename(path)
        # Bug fix: close the file handle explicitly instead of leaking it
        # until the garbage collector gets around to it.
        f = open(path, 'rb')
        try:
            content = f.read()
        finally:
            f.close()
        self.attach(filename, content, mimetype)

    def _create_attachment(self, filename, content, mimetype=None):
        """
        Converts the filename, content, mimetype triple into a MIME attachment
        object.
        """
        if mimetype is None:
            mimetype, _ = mimetypes.guess_type(filename)
            if mimetype is None:
                mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
        basetype, subtype = mimetype.split('/', 1)
        if basetype == 'text':
            attachment = SafeMIMEText(smart_str(content,
                settings.DEFAULT_CHARSET), subtype, settings.DEFAULT_CHARSET)
        else:
            # Encode non-text attachments with base64.
            attachment = MIMEBase(basetype, subtype)
            attachment.set_payload(content)
            Encoders.encode_base64(attachment)
        if filename:
            attachment.add_header('Content-Disposition', 'attachment',
                                  filename=filename)
        return attachment
+
class EmailMultiAlternatives(EmailMessage):
    """
    A version of EmailMessage that makes it easy to send multipart/alternative
    messages. For example, including text and HTML versions of the text is
    made easier.
    """
    multipart_subtype = 'alternative'

    def attach_alternative(self, content, mimetype=None):
        """Attach an alternative content representation."""
        # Equivalent to attach(filename=None, content=content,
        # mimetype=mimetype): filename-less parts become alternatives.
        self.attach(None, content, mimetype)
+
def send_mail(subject, message, from_email, recipient_list,
              fail_silently=False, auth_user=None, auth_password=None):
    """
    Easy wrapper for sending a single message to a recipient list. All members
    of the recipient list will see the other recipients in the 'To' field.

    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.

    Note: The API for this method is frozen. New code wanting to extend the
    functionality should use the EmailMessage class directly.
    """
    connection = SMTPConnection(username=auth_user, password=auth_password,
                                fail_silently=fail_silently)
    mail = EmailMessage(subject, message, from_email, recipient_list,
                        connection=connection)
    return mail.send()
+
def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
                   auth_password=None):
    """
    Given a datatuple of (subject, message, from_email, recipient_list), sends
    each message to each recipient list. Returns the number of e-mails sent.

    If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
    If auth_user and auth_password are set, they're used to log in.
    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.

    Note: The API for this method is frozen. New code wanting to extend the
    functionality should use the EmailMessage class directly.
    """
    connection = SMTPConnection(username=auth_user, password=auth_password,
                                fail_silently=fail_silently)
    messages = []
    for subject, message, sender, recipient in datatuple:
        messages.append(EmailMessage(subject, message, sender, recipient))
    # One connection is shared across all messages.
    return connection.send_messages(messages)
+
def mail_admins(subject, message, fail_silently=False):
    """Sends a message to the admins, as defined by the ADMINS setting."""
    recipient_list = [admin[1] for admin in settings.ADMINS]
    mail = EmailMessage(settings.EMAIL_SUBJECT_PREFIX + subject, message,
                        settings.SERVER_EMAIL, recipient_list)
    mail.send(fail_silently=fail_silently)
+
def mail_managers(subject, message, fail_silently=False):
    """Sends a message to the managers, as defined by the MANAGERS setting."""
    recipient_list = [manager[1] for manager in settings.MANAGERS]
    mail = EmailMessage(settings.EMAIL_SUBJECT_PREFIX + subject, message,
                        settings.SERVER_EMAIL, recipient_list)
    mail.send(fail_silently=fail_silently)
diff --git a/webapp/django/core/management/__init__.py b/webapp/django/core/management/__init__.py
new file mode 100644
index 0000000000..fc1e82b8fc
--- /dev/null
+++ b/webapp/django/core/management/__init__.py
@@ -0,0 +1,334 @@
+import os
+import sys
+from optparse import OptionParser
+import imp
+
+import django
+from django.core.management.base import BaseCommand, CommandError, handle_default_options
+
+# For backwards compatibility: get_version() used to be in this module.
+get_version = django.get_version
+
+# A cache of loaded commands, so that call_command
+# doesn't have to reload every time it's called.
+_commands = None
+
def find_commands(management_dir):
    """
    Given a path to a management directory, returns a list of all the command
    names that are available.

    Returns an empty list if no commands are defined.
    """
    command_dir = os.path.join(management_dir, 'commands')
    try:
        entries = os.listdir(command_dir)
    except OSError:
        # No 'commands' subdirectory at all: this app defines no commands.
        return []
    names = []
    for entry in entries:
        # Only public Python modules count as commands; strip '.py' to get
        # the command name.
        if entry.endswith('.py') and not entry.startswith('_'):
            names.append(entry[:-3])
    return names
+
def find_management_module(app_name):
    """
    Determines the path to the management module for the given app_name,
    without actually importing the application or the management module.

    Raises ImportError if the management module cannot be found for any reason.
    """
    # Walk the dotted path one package at a time, popping components off the
    # reversed list; 'management' is appended so it is resolved last.
    parts = app_name.split('.')
    parts.append('management')
    parts.reverse()
    part = parts.pop()
    path = None

    # When using manage.py, the project module is added to the path,
    # loaded, then removed from the path. This means that
    # testproject.testapp.models can be loaded in future, even if
    # testproject isn't in the path. When looking for the management
    # module, we need look for the case where the project name is part
    # of the app_name but the project directory itself isn't on the path.
    try:
        f, path, descr = imp.find_module(part, path)
    except ImportError, e:
        # If the outermost package can't be found but its name matches the
        # current working directory, assume we're inside the project
        # directory itself and continue descending from here.
        if os.path.basename(os.getcwd()) != part:
            raise e

    while parts:
        part = parts.pop()
        # imp.find_module wants a list of directories (or None for
        # sys.path), so wrap the single parent path when there is one.
        f, path, descr = imp.find_module(part, path and [path] or None)
    return path
+
def load_command_class(app_name, name):
    """
    Given a command name and an application name, returns the Command
    class instance. All errors raised by the import process
    (ImportError, AttributeError) are allowed to propagate.
    """
    # __import__ with a non-empty fromlist returns the leaf module
    # (<app>.management.commands.<name>) rather than the top-level package;
    # every command module is expected to define a class named 'Command'.
    return getattr(__import__('%s.management.commands.%s' % (app_name, name),
                              {}, {}, ['Command']), 'Command')()
+
def get_commands():
    """
    Returns a dictionary mapping command names to their callback applications.

    This works by looking for a management.commands package in django.core, and
    in each installed application -- if a commands package exists, all commands
    in that package are registered.

    Core commands are always included. If a settings module has been
    specified, user-defined commands will also be included, the
    startproject command will be disabled, and the startapp command
    will be modified to use the directory in which the settings module appears.

    The dictionary is in the format {command_name: app_name}. Key-value
    pairs from this dictionary can then be used in calls to
    load_command_class(app_name, command_name)

    If a specific version of a command must be loaded (e.g., with the
    startapp command), the instantiated module can be placed in the
    dictionary in place of the application name.

    The dictionary is cached on the first call and reused on subsequent
    calls.
    """
    global _commands
    if _commands is None:
        # Seed the cache with Django's own commands; __path__[0] is the
        # directory of this (django.core.management) package.
        _commands = dict([(name, 'django.core') for name in find_commands(__path__[0])])

        # Find the installed apps
        try:
            from django.conf import settings
            apps = settings.INSTALLED_APPS
        except (AttributeError, EnvironmentError, ImportError):
            # No usable settings module: only core commands are available.
            apps = []

        # Find the project directory
        try:
            from django.conf import settings
            # setup_environ is defined later in this module; the name is
            # resolved at call time, so the forward reference is safe.
            project_directory = setup_environ(
                __import__(
                    settings.SETTINGS_MODULE, {}, {},
                    (settings.SETTINGS_MODULE.split(".")[-1],)
                )
            )
        except (AttributeError, EnvironmentError, ImportError):
            project_directory = None

        # Find and load the management module for each installed app.
        for app_name in apps:
            try:
                path = find_management_module(app_name)
                # A later app wins if two apps define a command with the
                # same name.
                _commands.update(dict([(name, app_name)
                                       for name in find_commands(path)]))
            except ImportError:
                pass # No management module - ignore this app

        if project_directory:
            # Remove the "startproject" command from self.commands, because
            # that's a django-admin.py command, not a manage.py command.
            del _commands['startproject']

            # Override the startapp command so that it always uses the
            # project_directory, not the current working directory
            # (which is default).
            from django.core.management.commands.startapp import ProjectCommand
            _commands['startapp'] = ProjectCommand(project_directory)

    return _commands
+
def call_command(name, *args, **options):
    """
    Calls the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    Some examples:
        call_command('syncdb')
        call_command('shell', plain=True)
        call_command('sqlall', 'myapp')
    """
    try:
        app_name = get_commands()[name]
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            klass = app_name
        else:
            klass = load_command_class(app_name, name)
    except KeyError:
        raise CommandError, "Unknown command: %r" % name
    # execute() is invoked directly, bypassing run_from_argv()'s option
    # parsing -- callers supply options as keyword arguments instead.
    return klass.execute(*args, **options)
+
class LaxOptionParser(OptionParser):
    """
    An option parser that doesn't raise any errors on unknown options.

    This is needed because the --settings and --pythonpath options affect
    the commands (and thus the options) that are available to the user.
    """
    def error(self, msg):
        # Swallow parse errors entirely; the real subcommand's (strict)
        # parser will report problems later.
        pass

    def print_help(self):
        """Output nothing.

        The lax options are included in the normal option parser, so under
        normal usage, we don't need to print the lax options.
        """
        pass

    def print_lax_help(self):
        """Output the basic options available to every command.

        This just redirects to the default print_help() behaviour.
        """
        OptionParser.print_help(self)

    def _process_args(self, largs, rargs, values):
        """
        Overrides OptionParser._process_args to exclusively handle default
        options and ignore args and other options.

        This overrides the behavior of the super class, which stop parsing
        at the first unrecognized option.
        """
        while rargs:
            arg = rargs[0]
            try:
                if arg[0:2] == "--" and len(arg) > 2:
                    # process a single long option (possibly with value(s))
                    # the superclass code pops the arg off rargs
                    self._process_long_opt(rargs, values)
                elif arg[:1] == "-" and len(arg) > 1:
                    # process a cluster of short options (possibly with
                    # value(s) for the last one only)
                    # the superclass code pops the arg off rargs
                    self._process_short_opts(rargs, values)
                else:
                    # it's either a non-default option or an arg
                    # either way, add it to the args list so we can keep
                    # dealing with options
                    del rargs[0]
                    # NOTE(review): 'error' is not bound in this scope, so
                    # this line raises NameError, which is what transfers
                    # control to the except clause below. 'raise Exception'
                    # would state the intent explicitly -- confirm before
                    # changing, since the bare except makes both equivalent.
                    raise error
            except:
                # Unknown option or plain positional: record it in largs and
                # keep scanning the rest of the arguments.
                largs.append(arg)
+
class ManagementUtility(object):
    """
    Encapsulates the logic of the django-admin.py and manage.py utilities.

    A ManagementUtility has a number of commands, which can be manipulated
    by editing the self.commands dictionary.
    """
    def __init__(self, argv=None):
        # Default to the real command line; slice-copy it so later mutation
        # of sys.argv cannot affect this instance.
        self.argv = argv or sys.argv[:]
        self.prog_name = os.path.basename(self.argv[0])

    def main_help_text(self):
        """
        Returns the script's main help text, as a string.
        """
        usage = ['', "Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name, '']
        usage.append('Available subcommands:')
        # dict.keys() returns a plain list under Python 2; sort it in place
        # for stable, alphabetical output.
        commands = get_commands().keys()
        commands.sort()
        for cmd in commands:
            usage.append(' %s' % cmd)
        return '\n'.join(usage)

    def fetch_command(self, subcommand):
        """
        Tries to fetch the given subcommand, printing a message with the
        appropriate command called from the command line (usually
        "django-admin.py" or "manage.py") if it can't be found.
        """
        try:
            app_name = get_commands()[subcommand]
            if isinstance(app_name, BaseCommand):
                # If the command is already loaded, use it directly.
                klass = app_name
            else:
                klass = load_command_class(app_name, subcommand)
        except KeyError:
            sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\n" % \
                (subcommand, self.prog_name))
            sys.exit(1)
        return klass

    def execute(self):
        """
        Given the command-line arguments, this figures out which subcommand is
        being run, creates a parser appropriate to that command, and runs it.
        """
        # Preprocess options to extract --settings and --pythonpath.
        # These options could affect the commands that are available, so they
        # must be processed early.
        parser = LaxOptionParser(usage="%prog subcommand [options] [args]",
                                 version=get_version(),
                                 option_list=BaseCommand.option_list)
        try:
            options, args = parser.parse_args(self.argv)
            handle_default_options(options)
        except:
            pass # Ignore any option errors at this point.

        try:
            subcommand = self.argv[1]
        except IndexError:
            sys.stderr.write("Type '%s help' for usage.\n" % self.prog_name)
            sys.exit(1)

        if subcommand == 'help':
            # The full argv was handed to the lax parser, so args[0] is the
            # program name, args[1] is 'help' and args[2] (if present) is
            # the subcommand to describe.
            if len(args) > 2:
                self.fetch_command(args[2]).print_help(self.prog_name, args[2])
            else:
                parser.print_lax_help()
                sys.stderr.write(self.main_help_text() + '\n')
                sys.exit(1)
        # Special-cases: We want 'django-admin.py --version' and
        # 'django-admin.py --help' to work, for backwards compatibility.
        elif self.argv[1:] == ['--version']:
            # LaxOptionParser already takes care of printing the version.
            pass
        elif self.argv[1:] == ['--help']:
            parser.print_lax_help()
            sys.stderr.write(self.main_help_text() + '\n')
        else:
            self.fetch_command(subcommand).run_from_argv(self.argv)
+
def setup_environ(settings_mod):
    """
    Configures the runtime environment. This can also be used by external
    scripts wanting to set up a similar environment to manage.py.
    Returns the project directory (assuming the passed settings module is
    directly in the project directory).
    """
    # Add this project to sys.path so that it's importable in the conventional
    # way. For example, if this file (manage.py) lives in a directory
    # "myproject", this code would add "/path/to/myproject" to sys.path.
    project_directory, settings_filename = os.path.split(settings_mod.__file__)
    if project_directory == os.curdir or not project_directory:
        # The settings module was loaded via a bare filename; fall back to
        # the current working directory as the project directory.
        project_directory = os.getcwd()
    project_name = os.path.basename(project_directory)
    settings_name = os.path.splitext(settings_filename)[0]
    # Temporarily push the project's *parent* directory onto sys.path,
    # import the project package so '<project>.<app>' imports resolve later,
    # then pop the entry again to leave sys.path as we found it.
    sys.path.append(os.path.join(project_directory, os.pardir))
    project_module = __import__(project_name, {}, {}, [''])
    sys.path.pop()

    # Set DJANGO_SETTINGS_MODULE appropriately.
    os.environ['DJANGO_SETTINGS_MODULE'] = '%s.%s' % (project_name, settings_name)
    return project_directory
+
def execute_from_command_line(argv=None):
    """
    A simple method that runs a ManagementUtility.
    """
    # Build the utility for the given argv (or sys.argv) and dispatch.
    ManagementUtility(argv).execute()
+
def execute_manager(settings_mod, argv=None):
    """
    Like execute_from_command_line(), but for use by manage.py, a
    project-specific django-admin.py utility.
    """
    # Configure DJANGO_SETTINGS_MODULE and the project import path from the
    # supplied settings module, then dispatch exactly as
    # execute_from_command_line() does.
    setup_environ(settings_mod)
    ManagementUtility(argv).execute()
diff --git a/webapp/django/core/management/base.py b/webapp/django/core/management/base.py
new file mode 100644
index 0000000000..20cc7c0f40
--- /dev/null
+++ b/webapp/django/core/management/base.py
@@ -0,0 +1,242 @@
+import os
+import sys
+from optparse import make_option, OptionParser
+
+import django
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management.color import color_style
+
+try:
+ set
+except NameError:
+ from sets import Set as set # For Python 2.3
+
class CommandError(Exception):
    """
    Indicates a problem while executing a management command. When raised
    from handle(), BaseCommand.execute() writes the message to stderr and
    exits with status 1 instead of printing a traceback.
    """
    pass
+
def handle_default_options(options):
    """
    Include any default options that all commands should accept
    here so that ManagementUtility can handle them before searching
    for user commands.
    """
    settings_path = options.settings
    if settings_path:
        # Make the chosen settings module visible to everything downstream.
        os.environ['DJANGO_SETTINGS_MODULE'] = settings_path
    extra_path = options.pythonpath
    if extra_path:
        # Prepend so the user-supplied directory wins over existing entries.
        sys.path.insert(0, extra_path)
+
class BaseCommand(object):
    """
    The base class from which all management commands derive.

    Subclasses supply a handle() implementation (directly, or via the more
    specific AppCommand / LabelCommand / NoArgsCommand hooks) and may extend
    option_list with their own optparse options.
    """
    # Metadata about this command.
    option_list = (
        make_option('--settings',
            help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
        make_option('--pythonpath',
            help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
        make_option('--traceback', action='store_true',
            help='Print traceback on exception'),
    )
    help = ''  # Longer description appended to the usage string.
    args = ''  # Usage text for positional arguments, e.g. '<appname ...>'.

    # Configuration shortcuts that alter various logic.
    can_import_settings = True        # May settings (and translation) be imported?
    requires_model_validation = True  # Run validate() before handle()?
    output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"

    def __init__(self):
        # Styled (possibly colorized) output helper; see color_style().
        self.style = color_style()

    def get_version(self):
        """
        Returns the Django version, which should be correct for all built-in
        Django commands. User-supplied commands should override this method.
        """
        return django.get_version()

    def usage(self, subcommand):
        """Return the optparse usage string for this subcommand."""
        usage = '%%prog %s [options] %s' % (subcommand, self.args)
        if self.help:
            return '%s\n\n%s' % (usage, self.help)
        else:
            return usage

    def create_parser(self, prog_name, subcommand):
        """Build the OptionParser used to parse this command's arguments."""
        return OptionParser(prog=prog_name,
                            usage=self.usage(subcommand),
                            version=self.get_version(),
                            option_list=self.option_list)

    def print_help(self, prog_name, subcommand):
        """Print this command's help text to stdout."""
        parser = self.create_parser(prog_name, subcommand)
        parser.print_help()

    def run_from_argv(self, argv):
        """Parse argv (argv[0]=program, argv[1]=subcommand) and execute."""
        parser = self.create_parser(argv[0], argv[1])
        options, args = parser.parse_args(argv[2:])
        handle_default_options(options)
        self.execute(*args, **options.__dict__)

    def execute(self, *args, **options):
        """
        Validate installed models (if requested), call handle(), and print
        its output, optionally wrapped in BEGIN/COMMIT. A CommandError is
        reported to stderr and terminates the process with exit status 1.
        """
        # Switch to English, because django-admin.py creates database content
        # like permissions, and those shouldn't contain any translations.
        # But only do this if we can assume we have a working settings file,
        # because django.utils.translation requires settings.
        if self.can_import_settings:
            try:
                from django.utils import translation
                translation.activate('en-us')
            except ImportError, e:
                # If settings should be available, but aren't,
                # raise the error and quit.
                sys.stderr.write(self.style.ERROR(str('Error: %s\n' % e)))
                sys.exit(1)
        try:
            if self.requires_model_validation:
                self.validate()
            output = self.handle(*args, **options)
            if output:
                if self.output_transaction:
                    # This needs to be imported here, because it relies on settings.
                    from django.db import connection
                    if connection.ops.start_transaction_sql():
                        print self.style.SQL_KEYWORD(connection.ops.start_transaction_sql())
                print output
                if self.output_transaction:
                    print self.style.SQL_KEYWORD("COMMIT;")
        except CommandError, e:
            sys.stderr.write(self.style.ERROR(str('Error: %s\n' % e)))
            sys.exit(1)

    def validate(self, app=None, display_num_errors=False):
        """
        Validates the given app, raising CommandError for any errors.

        If app is None, then this will validate all installed apps.
        """
        from django.core.management.validation import get_validation_errors
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        # Validation errors are written to an in-memory buffer so they can
        # be folded into the CommandError message.
        s = StringIO()
        num_errors = get_validation_errors(s, app)
        if num_errors:
            s.seek(0)
            error_text = s.read()
            raise CommandError("One or more models did not validate:\n%s" % error_text)
        if display_num_errors:
            print "%s error%s found" % (num_errors, num_errors != 1 and 's' or '')

    def handle(self, *args, **options):
        """Perform the command's actual work. Subclasses must override."""
        raise NotImplementedError()
+
class AppCommand(BaseCommand):
    """
    A management command that takes one or more installed application
    names as arguments and calls handle_app() once for each of them.
    """
    args = '<appname appname ...>'

    def handle(self, *app_labels, **options):
        from django.db import models
        if not app_labels:
            raise CommandError('Enter at least one appname.')
        try:
            # Resolve every label to its app module up front, so one bad
            # label fails the command before any app is processed.
            app_list = [models.get_app(app_label) for app_label in app_labels]
        except (ImproperlyConfigured, ImportError), e:
            raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
        output = []
        for app in app_list:
            app_output = self.handle_app(app, **options)
            if app_output:
                output.append(app_output)
        return '\n'.join(output)

    def handle_app(self, app, **options):
        """Perform the command's work on a single app module. Subclasses
        must override."""
        raise NotImplementedError()
+
class LabelCommand(BaseCommand):
    """
    A management command that takes one or more arbitrary string labels as
    arguments and calls handle_label() once for each of them.
    """
    args = '<label label ...>'
    label = 'label'  # Noun used in usage/error text; subclasses may override.

    def handle(self, *labels, **options):
        if not labels:
            raise CommandError('Enter at least one %s.' % self.label)

        output = []
        for label in labels:
            label_output = self.handle_label(label, **options)
            if label_output:
                output.append(label_output)
        return '\n'.join(output)

    def handle_label(self, label, **options):
        """Perform the command's work on a single label. Subclasses must
        override."""
        raise NotImplementedError()
+
class NoArgsCommand(BaseCommand):
    """
    A management command that accepts no positional arguments; work is done
    in handle_noargs().
    """
    args = ''

    def handle(self, *args, **options):
        # Reject stray positionals explicitly instead of silently ignoring.
        if args:
            raise CommandError("Command doesn't accept any arguments")
        return self.handle_noargs(**options)

    def handle_noargs(self, **options):
        """Perform the command's work. Subclasses must override."""
        raise NotImplementedError()
+
+def copy_helper(style, app_or_project, name, directory, other_name=''):
+ """
+ Copies either a Django application layout template or a Django project
+ layout template into the specified directory.
+ """
+ # style -- A color style object (see django.core.management.color).
+ # app_or_project -- The string 'app' or 'project'.
+ # name -- The name of the application or project.
+ # directory -- The directory to which the layout template should be copied.
+ # other_name -- When copying an application layout, this should be the name
+ # of the project.
+ import re
+ import shutil
+ other = {'project': 'app', 'app': 'project'}[app_or_project]
+ if not re.search(r'^\w+$', name): # If it's not a valid directory name.
+ raise CommandError("%r is not a valid %s name. Please use only numbers, letters and underscores." % (name, app_or_project))
+ top_dir = os.path.join(directory, name)
+ try:
+ os.mkdir(top_dir)
+ except OSError, e:
+ raise CommandError(e)
+
+ # Determine where the app or project templates are. Use
+ # django.__path__[0] because we don't know into which directory
+ # django has been installed.
+ template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project)
+
+ for d, subdirs, files in os.walk(template_dir):
+ relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
+ if relative_dir:
+ os.mkdir(os.path.join(top_dir, relative_dir))
+ for i, subdir in enumerate(subdirs):
+ if subdir.startswith('.'):
+ del subdirs[i]
+ for f in files:
+ if f.endswith('.pyc'):
+ continue
+ path_old = os.path.join(d, f)
+ path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
+ fp_old = open(path_old, 'r')
+ fp_new = open(path_new, 'w')
+ fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
+ fp_old.close()
+ fp_new.close()
+ try:
+ shutil.copymode(path_old, path_new)
+ _make_writeable(path_new)
+ except OSError:
+ sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
+
+def _make_writeable(filename):
+ "Makes sure that the file is writeable. Useful if our source is read-only."
+ import stat
+ if sys.platform.startswith('java'):
+ # On Jython there is no os.access()
+ return
+ if not os.access(filename, os.W_OK):
+ st = os.stat(filename)
+ new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
+ os.chmod(filename, new_permissions)
diff --git a/webapp/django/core/management/color.py b/webapp/django/core/management/color.py
new file mode 100644
index 0000000000..f5e39398f4
--- /dev/null
+++ b/webapp/django/core/management/color.py
@@ -0,0 +1,41 @@
+"""
+Sets up the terminal color scheme.
+"""
+
+import sys
+
+from django.utils import termcolors
+
def supports_color():
    """
    Returns True if the running system's terminal supports color, and False
    otherwise.
    """
    # Win32/Pocket PC consoles are treated as colorless.
    platform_ok = sys.platform not in ('win32', 'Pocket PC')
    # isatty is not always implemented, #6223.
    stdout_is_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
    return platform_ok and stdout_is_tty
+
def color_style():
    """Returns a Style object with the Django color scheme."""
    # Fall back to pass-through styling when the terminal can't do color.
    if not supports_color():
        return no_style()
    # A bare namespace object; each attribute is a callable that wraps its
    # argument in the appropriate ANSI escape codes.
    class dummy: pass
    style = dummy()
    style.ERROR = termcolors.make_style(fg='red', opts=('bold',))
    style.ERROR_OUTPUT = termcolors.make_style(fg='red', opts=('bold',))
    style.NOTICE = termcolors.make_style(fg='red')
    style.SQL_FIELD = termcolors.make_style(fg='green', opts=('bold',))
    style.SQL_COLTYPE = termcolors.make_style(fg='green')
    style.SQL_KEYWORD = termcolors.make_style(fg='yellow')
    style.SQL_TABLE = termcolors.make_style(opts=('bold',))
    return style
+
def no_style():
    """
    Returns a Style object that performs no colorization: every attribute
    lookup yields an identity function, so style.ROLE(text) returns the
    text unchanged.
    """
    class dummy:
        def __getattr__(self, attr):
            # Any requested style role resolves to a pass-through formatter.
            identity = lambda x: x
            return identity
    return dummy()
diff --git a/webapp/django/core/management/commands/__init__.py b/webapp/django/core/management/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/webapp/django/core/management/commands/__init__.py
diff --git a/webapp/django/core/management/commands/adminindex.py b/webapp/django/core/management/commands/adminindex.py
new file mode 100644
index 0000000000..4f389136ca
--- /dev/null
+++ b/webapp/django/core/management/commands/adminindex.py
@@ -0,0 +1,34 @@
+from django.core.management.base import AppCommand
+from django.utils.encoding import force_unicode
+from django.utils.text import capfirst
+
+MODULE_TEMPLATE = ''' {%% if perms.%(app)s.%(addperm)s or perms.%(app)s.%(changeperm)s %%}
+ <tr>
+ <th>{%% if perms.%(app)s.%(changeperm)s %%}<a href="%(app)s/%(mod)s/">{%% endif %%}%(name)s{%% if perms.%(app)s.%(changeperm)s %%}</a>{%% endif %%}</th>
+ <td class="x50">{%% if perms.%(app)s.%(addperm)s %%}<a href="%(app)s/%(mod)s/add/" class="addlink">{%% endif %%}Add{%% if perms.%(app)s.%(addperm)s %%}</a>{%% endif %%}</td>
+ <td class="x75">{%% if perms.%(app)s.%(changeperm)s %%}<a href="%(app)s/%(mod)s/" class="changelink">{%% endif %%}Change{%% if perms.%(app)s.%(changeperm)s %%}</a>{%% endif %%}</td>
+ </tr>
+ {%% endif %%}'''
+
class Command(AppCommand):
    help = 'Prints the admin-index template snippet for the given app name(s).'

    def handle_app(self, app, **options):
        from django.db.models import get_models
        output = []
        app_models = get_models(app)
        # NOTE(review): an app with zero models makes app_models[0] raise
        # IndexError here -- confirm whether callers guarantee models exist.
        app_label = app_models[0]._meta.app_label
        output.append('{%% if perms.%s %%}' % app_label)
        output.append('<div class="module"><h2>%s</h2><table>' % app_label.title())
        for model in app_models:
            # Presumably only models registered with the admin (_meta.admin
            # is set) get a row in the snippet -- verify against the admin
            # registration code.
            if model._meta.admin:
                output.append(MODULE_TEMPLATE % {
                    'app': app_label,
                    'mod': model._meta.module_name,
                    'name': force_unicode(capfirst(model._meta.verbose_name_plural)),
                    'addperm': model._meta.get_add_permission(),
                    'changeperm': model._meta.get_change_permission(),
                })
        output.append('</table></div>')
        output.append('{% endif %}')
        return '\n'.join(output)
diff --git a/webapp/django/core/management/commands/cleanup.py b/webapp/django/core/management/commands/cleanup.py
new file mode 100644
index 0000000000..a5c932b686
--- /dev/null
+++ b/webapp/django/core/management/commands/cleanup.py
@@ -0,0 +1,11 @@
+import datetime
+from django.core.management.base import NoArgsCommand
+
class Command(NoArgsCommand):
    help = "Can be run as a cronjob or directly to clean out old data from the database (only expired sessions at the moment)."

    def handle_noargs(self, **options):
        # Imports are deferred so the command can be listed without a fully
        # configured settings module.
        from django.db import transaction
        from django.contrib.sessions.models import Session
        # Delete every session whose expiry date has passed, then commit
        # unless we're inside a managed transaction.
        Session.objects.filter(expire_date__lt=datetime.datetime.now()).delete()
        transaction.commit_unless_managed()
diff --git a/webapp/django/core/management/commands/compilemessages.py b/webapp/django/core/management/commands/compilemessages.py
new file mode 100644
index 0000000000..d1f1647f8b
--- /dev/null
+++ b/webapp/django/core/management/commands/compilemessages.py
@@ -0,0 +1,57 @@
+import os
+import sys
+from optparse import make_option
+from django.core.management.base import BaseCommand, CommandError
+
+try:
+ set
+except NameError:
+ from sets import Set as set # For Python 2.3
+
def compile_messages(locale=None):
    """
    Compile every .po file found under the known locale directories into a
    .mo file using the external msgfmt tool. If locale is given, only that
    locale's LC_MESSAGES directory is processed.

    Raises CommandError when no locale directory exists at all.
    """
    # Relative paths: resolved against the current working directory, so the
    # command is expected to run from a project/app (or Django SVN) root.
    basedirs = [os.path.join('conf', 'locale'), 'locale']
    if os.environ.get('DJANGO_SETTINGS_MODULE'):
        from django.conf import settings
        basedirs.extend(settings.LOCALE_PATHS)

    # Gather existing directories.
    basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))

    if not basedirs:
        raise CommandError("This script should be run from the Django SVN tree or your project or app tree, or with the settings module specified.")

    for basedir in basedirs:
        if locale:
            basedir = os.path.join(basedir, locale, 'LC_MESSAGES')
        for dirpath, dirnames, filenames in os.walk(basedir):
            for f in filenames:
                if f.endswith('.po'):
                    sys.stderr.write('processing file %s in %s\n' % (f, dirpath))
                    pf = os.path.splitext(os.path.join(dirpath, f))[0]
                    # Store the names of the .mo and .po files in an environment
                    # variable, rather than doing a string replacement into the
                    # command, so that we can take advantage of shell quoting, to
                    # quote any malicious characters/escaping.
                    # See http://cyberelk.net/tim/articles/cmdline/ar01s02.html
                    os.environ['djangocompilemo'] = pf + '.mo'
                    os.environ['djangocompilepo'] = pf + '.po'
                    if sys.platform == 'win32': # Different shell-variable syntax
                        cmd = 'msgfmt --check-format -o "%djangocompilemo%" "%djangocompilepo%"'
                    else:
                        cmd = 'msgfmt --check-format -o "$djangocompilemo" "$djangocompilepo"'
                    # NOTE(review): the exit status of msgfmt is ignored, so
                    # a failed compilation is silent apart from msgfmt's own
                    # stderr output -- confirm whether that is intended.
                    os.system(cmd)
+
+
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--locale', '-l', dest='locale',
            help='The locale to process. Default is to process all.'),
    )
    help = 'Compiles .po files to .mo files for use with builtin gettext support.'

    # No models or settings are needed: the work is purely on locale files.
    requires_model_validation = False
    can_import_settings = False

    def handle(self, **options):
        locale = options.get('locale')
        # Delegate to the module-level worker; locale may be None (= all).
        compile_messages(locale)
diff --git a/webapp/django/core/management/commands/createcachetable.py b/webapp/django/core/management/commands/createcachetable.py
new file mode 100644
index 0000000000..098bca793f
--- /dev/null
+++ b/webapp/django/core/management/commands/createcachetable.py
@@ -0,0 +1,42 @@
+from django.core.management.base import LabelCommand
+
class Command(LabelCommand):
    help = "Creates the table needed to use the SQL cache backend."
    args = "<tablename>"
    label = 'tablename'

    # The cache table is not backed by a model, so skip model validation.
    requires_model_validation = False

    def handle_label(self, tablename, **options):
        from django.db import connection, transaction, models
        # Standalone field instances (not attached to any model) are used
        # only to generate portable column definitions via db_type().
        fields = (
            # "key" is a reserved word in MySQL, so use "cache_key" instead.
            models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
            models.TextField(name='value'),
            models.DateTimeField(name='expires', db_index=True),
        )
        table_output = []
        index_output = []
        qn = connection.ops.quote_name
        for f in fields:
            field_output = [qn(f.name), f.db_type()]
            field_output.append("%sNULL" % (not f.null and "NOT " or ""))
            if f.primary_key:
                field_output.append("PRIMARY KEY")
            elif f.unique:
                field_output.append("UNIQUE")
            if f.db_index:
                # Indexes are created with separate statements after the
                # CREATE TABLE.
                unique = f.unique and "UNIQUE " or ""
                index_output.append("CREATE %sINDEX %s_%s ON %s (%s);" % \
                    (unique, tablename, f.name, qn(tablename),
                    qn(f.name)))
            table_output.append(" ".join(field_output))
        # Assemble the CREATE TABLE statement, comma-separating all but the
        # last column definition.
        full_statement = ["CREATE TABLE %s (" % qn(tablename)]
        for i, line in enumerate(table_output):
            full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or ''))
        full_statement.append(');')
        curs = connection.cursor()
        curs.execute("\n".join(full_statement))
        for statement in index_output:
            curs.execute(statement)
        transaction.commit_unless_managed()
diff --git a/webapp/django/core/management/commands/dbshell.py b/webapp/django/core/management/commands/dbshell.py
new file mode 100644
index 0000000000..18faa6a130
--- /dev/null
+++ b/webapp/django/core/management/commands/dbshell.py
@@ -0,0 +1,10 @@
+from django.core.management.base import NoArgsCommand
+
class Command(NoArgsCommand):
    help = "Runs the command-line client for the current DATABASE_ENGINE."

    # Only database settings are needed; skip model validation.
    requires_model_validation = False

    def handle_noargs(self, **options):
        from django.db import connection
        # Each backend's client object knows how to spawn its own
        # interactive shell (psql, mysql, sqlite3, ...).
        connection.client.runshell()
diff --git a/webapp/django/core/management/commands/diffsettings.py b/webapp/django/core/management/commands/diffsettings.py
new file mode 100644
index 0000000000..2459f11700
--- /dev/null
+++ b/webapp/django/core/management/commands/diffsettings.py
@@ -0,0 +1,32 @@
+from django.core.management.base import NoArgsCommand
+
def module_to_dict(module, omittable=lambda k: k.startswith('_')):
    "Converts a module namespace to a Python dictionary. Used by get_settings_diff."
    result = {}
    for attr_name, attr_value in module.__dict__.items():
        # Skip names the omittable predicate rejects (underscore-prefixed by
        # default); values are stored as their repr() for display/diffing.
        if not omittable(attr_name):
            result[attr_name] = repr(attr_value)
    return result
+
class Command(NoArgsCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings. Settings that don't appear in the defaults are
    followed by "###"."""

    # Only the settings module is inspected; no models are touched.
    requires_model_validation = False

    def handle_noargs(self, **options):
        # Inspired by Postfix's "postconf -n".
        from django.conf import settings, global_settings

        # Because settings are imported lazily, we need to explicitly load them.
        # NOTE(review): _import_settings and _target are private members of
        # the lazy settings object -- this command is coupled to that
        # internal API.
        settings._import_settings()

        user_settings = module_to_dict(settings._target)
        default_settings = module_to_dict(global_settings)

        output = []
        # Python 2: keys() is a list; sort for deterministic output.
        keys = user_settings.keys()
        keys.sort()
        for key in keys:
            if key not in default_settings:
                # Setting exists only in the user's settings module.
                output.append("%s = %s ###" % (key, user_settings[key]))
            elif user_settings[key] != default_settings[key]:
                # Setting overrides a Django default (compared by repr()).
                output.append("%s = %s" % (key, user_settings[key]))
        print '\n'.join(output)
diff --git a/webapp/django/core/management/commands/dumpdata.py b/webapp/django/core/management/commands/dumpdata.py
new file mode 100644
index 0000000000..2559d57104
--- /dev/null
+++ b/webapp/django/core/management/commands/dumpdata.py
@@ -0,0 +1,52 @@
+from django.core.management.base import BaseCommand, CommandError
+from django.core import serializers
+
+from optparse import make_option
+
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.'),
        make_option('--indent', default=None, dest='indent', type='int',
            help='Specifies the indent level to use when pretty-printing output'),
        make_option('-e', '--exclude', dest='exclude', action='append', default=[],
            help='App to exclude (use multiple --exclude to exclude multiple apps).'),
    )
    help = 'Output the contents of the database as a fixture of the given format.'
    args = '[appname ...]'

    def handle(self, *app_labels, **options):
        from django.db.models import get_app, get_apps, get_models

        format = options.get('format', 'json')
        indent = options.get('indent', None)
        exclude = options.get('exclude', [])
        show_traceback = options.get('traceback', False)

        # Resolve excluded labels first so a bad --exclude fails early.
        excluded_apps = [get_app(app_label) for app_label in exclude]

        if len(app_labels) == 0:
            # No positional args: dump every installed app minus exclusions.
            app_list = [app for app in get_apps() if app not in excluded_apps]
        else:
            app_list = [get_app(app_label) for app_label in app_labels]

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            # NOTE(review): belt-and-braces -- the membership test above
            # should already have rejected unknown formats, so this KeyError
            # path looks unreachable for public formats; confirm.
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Collect every object of every model of every selected app into a
        # single flat list for serialization.
        objects = []
        for app in app_list:
            for model in get_models(app):
                objects.extend(model._default_manager.all())
        try:
            return serializers.serialize(format, objects, indent=indent)
        except Exception, e:
            # With --traceback, surface the original exception; otherwise
            # report a concise CommandError.
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
diff --git a/webapp/django/core/management/commands/flush.py b/webapp/django/core/management/commands/flush.py
new file mode 100644
index 0000000000..51d5034ff4
--- /dev/null
+++ b/webapp/django/core/management/commands/flush.py
@@ -0,0 +1,70 @@
+from django.core.management.base import NoArgsCommand, CommandError
+from django.core.management.color import no_style
+from optparse import make_option
+
class Command(NoArgsCommand):
    # Returns every Django-managed table to its state just after syncdb.
    option_list = NoArgsCommand.option_list + (
        make_option('--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
    )
    help = "Executes ``sqlflush`` on the current database."

    def handle_noargs(self, **options):
        """
        Confirm with the user (unless --noinput), execute the SQL produced
        by sql_flush, emit the post-sync signal and reinstall the
        'initial_data' fixture.
        """
        from django.conf import settings
        from django.db import connection, transaction, models
        from django.core.management.sql import sql_flush, emit_post_sync_signal

        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                __import__(app_name + '.management', {}, {}, [''])
            except ImportError:
                # Apps without a management module are simply skipped.
                pass

        # only_django=True: flush only tables Django knows about.
        sql_list = sql_flush(self.style, only_django=True)

        if interactive:
            confirm = raw_input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % settings.DATABASE_NAME)
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
            except Exception, e:
                # Undo any partially-executed statements before reporting.
                transaction.rollback_unless_managed()
                raise CommandError("""Database %s couldn't be flushed. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: %s""" % (settings.DATABASE_NAME, e))
            transaction.commit_unless_managed()

            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            emit_post_sync_signal(models.get_models(), verbosity, interactive)

            # Reinstall the initial_data fixture.
            # NOTE(review): all of this command's options (verbosity,
            # interactive, ...) are forwarded to loaddata unchanged.
            from django.core.management import call_command
            call_command('loaddata', 'initial_data', **options)

        else:
            print "Flush cancelled."
diff --git a/webapp/django/core/management/commands/inspectdb.py b/webapp/django/core/management/commands/inspectdb.py
new file mode 100644
index 0000000000..6e84ed64de
--- /dev/null
+++ b/webapp/django/core/management/commands/inspectdb.py
@@ -0,0 +1,125 @@
+from django.core.management.base import NoArgsCommand, CommandError
+
class Command(NoArgsCommand):
    help = "Introspects the database tables in the given database and outputs a Django model module."

    # The models being generated obviously can't be validated beforehand.
    requires_model_validation = False

    def handle_noargs(self, **options):
        """Print the generated models module line by line."""
        try:
            for line in self.handle_inspection():
                print line
        except NotImplementedError:
            raise CommandError("Database inspection isn't supported for the currently selected database backend.")

    def handle_inspection(self):
        """
        Generator yielding, line by line, the source of a models module
        mirroring the tables found in the current database.
        """
        from django.db import connection
        import keyword

        # e.g. 'some_table-name' -> 'SomeTablename'
        table2model = lambda table_name: table_name.title().replace('_', '').replace(' ', '').replace('-', '')

        cursor = connection.cursor()
        yield "# This is an auto-generated Django model module."
        yield "# You'll have to do the following manually to clean this up:"
        yield "#     * Rearrange models' order"
        yield "#     * Make sure each model has one field with primary_key=True"
        yield "# Feel free to rename the models, but don't rename db_table values or field names."
        yield "#"
        yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'"
        yield "# into your database."
        yield ''
        yield 'from django.db import models'
        yield ''
        for table_name in connection.introspection.get_table_list(cursor):
            yield 'class %s(models.Model):' % table2model(table_name)
            try:
                relations = connection.introspection.get_relations(cursor, table_name)
            except NotImplementedError:
                # Backend can't report foreign keys; treat columns as plain.
                relations = {}
            try:
                indexes = connection.introspection.get_indexes(cursor, table_name)
            except NotImplementedError:
                indexes = {}
            for i, row in enumerate(connection.introspection.get_table_description(cursor, table_name)):
                att_name = row[0].lower()
                comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
                extra_params = {}  # Holds Field parameters such as 'db_column'.

                # If we need to do field name modifications,
                # remember the original field name
                if ' ' in att_name or '-' in att_name or keyword.iskeyword(att_name):
                    extra_params['db_column'] = att_name

                # Now modify the field name to make it python compatible.
                if ' ' in att_name:
                    att_name = att_name.replace(' ', '_')
                    comment_notes.append('Field renamed to remove spaces.')
                if '-' in att_name:
                    att_name = att_name.replace('-', '_')
                    comment_notes.append('Field renamed to remove dashes.')
                if keyword.iskeyword(att_name):
                    att_name += '_field'
                    comment_notes.append('Field renamed because it was a Python reserved word.')

                if i in relations:
                    # Column i is a foreign key; point at the target model
                    # ('self' for a self-referential table).
                    rel_to = relations[i][1] == table_name and "'self'" or table2model(relations[i][1])
                    field_type = 'ForeignKey(%s' % rel_to
                    if att_name.endswith('_id'):
                        att_name = att_name[:-3]
                    else:
                        extra_params['db_column'] = att_name
                else:
                    try:
                        field_type = connection.introspection.data_types_reverse[row[1]]
                    except KeyError:
                        field_type = 'TextField'
                        comment_notes.append('This field type is a guess.')

                # This is a hook for DATA_TYPES_REVERSE to return a tuple of
                # (field_type, extra_params_dict).
                if type(field_type) is tuple:
                    field_type, new_params = field_type
                    extra_params.update(new_params)

                # Add max_length for all CharFields.
                if field_type == 'CharField' and row[3]:
                    extra_params['max_length'] = row[3]

                if field_type == 'DecimalField':
                    extra_params['max_digits'] = row[4]
                    extra_params['decimal_places'] = row[5]

                # Add primary_key and unique, if necessary.
                column_name = extra_params.get('db_column', att_name)
                if column_name in indexes:
                    if indexes[column_name]['primary_key']:
                        extra_params['primary_key'] = True
                    elif indexes[column_name]['unique']:
                        extra_params['unique'] = True

                field_type += '('

                # Don't output 'id = meta.AutoField(primary_key=True)', because
                # that's assumed if it doesn't exist.
                if att_name == 'id' and field_type == 'AutoField(' and extra_params == {'primary_key': True}:
                    continue

                # Add 'null' and 'blank', if the 'null_ok' flag was present in the
                # table description.
                if row[6]: # If it's NULL...
                    extra_params['blank'] = True
                    if not field_type in ('TextField(', 'CharField('):
                        extra_params['null'] = True

                field_desc = '%s = models.%s' % (att_name, field_type)
                if extra_params:
                    if not field_desc.endswith('('):
                        field_desc += ', '
                    field_desc += ', '.join(['%s=%r' % (k, v) for k, v in extra_params.items()])
                field_desc += ')'
                if comment_notes:
                    field_desc += ' # ' + ' '.join(comment_notes)
                yield '    %s' % field_desc
            yield '    class Meta:'
            yield '        db_table = %r' % table_name
            yield ''
diff --git a/webapp/django/core/management/commands/loaddata.py b/webapp/django/core/management/commands/loaddata.py
new file mode 100644
index 0000000000..c69eeb96bd
--- /dev/null
+++ b/webapp/django/core/management/commands/loaddata.py
@@ -0,0 +1,181 @@
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+from optparse import make_option
+import sys
+import os
+
+try:
+ set
+except NameError:
+ from sets import Set as set # Python 2.3 fallback
+
class Command(BaseCommand):
    # Loads fixture files into the database. Fixtures are searched for in
    # each app's 'fixtures' directory, settings.FIXTURE_DIRS, and the cwd.
    option_list = BaseCommand.option_list + (
        make_option('--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
    )
    help = 'Installs the named fixture(s) in the database.'
    args = "fixture [fixture ...]"

    def handle(self, *fixture_labels, **options):
        """
        Install every named fixture inside a single transaction.

        A label may be 'name' (every known serialization format is tried)
        or 'name.format' (only that format is tried). On any error the
        transaction is rolled back and the method returns early; on success
        the database sequences are reset and the transaction committed.
        """
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        objects_per_fixture = []
        models = set()

        # Renders '' (the cwd entry of the search path) as 'absolute path'.
        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed()
            transaction.enter_transaction_management()
            transaction.managed(True)

        app_fixtures = [os.path.join(os.path.dirname(app.__file__), 'fixtures') for app in get_apps()]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')
            if len(parts) == 1:
                # Bare label: try every registered public format.
                fixture_name = fixture_label
                formats = serializers.get_public_serializer_formats()
            else:
                # 'name.format' label: accept only that format.
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                # NOTE(review): rollback() runs here even when commit=False,
                # i.e. it rolls back the caller's transaction — confirm
                # that is intended.
                sys.stderr.write(
                    self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
                        (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                # The trailing '' makes open() try paths relative to the cwd.
                fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    serializer = serializers.get_serializer(format)
                    if verbosity > 1:
                        print "Trying %s for %s fixture '%s'..." % \
                            (humanize(fixture_dir), format, fixture_name)
                    try:
                        full_path = os.path.join(fixture_dir, '.'.join([fixture_name, format]))
                        fixture = open(full_path, 'r')
                        if label_found:
                            # Same fixture name found in more than one format
                            # in this directory: ambiguous, so abort.
                            fixture.close()
                            print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                                (fixture_name, humanize(fixture_dir)))
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            return
                        else:
                            fixture_count += 1
                            objects_per_fixture.append(0)
                            if verbosity > 0:
                                print "Installing %s fixture '%s' from %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
                            try:
                                objects = serializers.deserialize(format, fixture)
                                for obj in objects:
                                    object_count += 1
                                    objects_per_fixture[-1] += 1
                                    models.add(obj.object.__class__)
                                    obj.save()
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                # Never swallow user interrupts.
                                raise
                            except Exception:
                                # NOTE(review): 'traceback' is imported twice
                                # below (here and in the show_traceback
                                # branch); the second import is redundant.
                                import traceback
                                fixture.close()
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                if show_traceback:
                                    import traceback
                                    traceback.print_exc()
                                else:
                                    sys.stderr.write(
                                        self.style.ERROR("Problem installing fixture '%s': %s\n" %
                                             (full_path, traceback.format_exc())))
                                return
                            fixture.close()
                    except:
                        # Bare except: open() failing is treated as "no such
                        # fixture in this directory". NOTE(review): this also
                        # hides unexpected errors (e.g. permission problems).
                        if verbosity > 1:
                            print "No %s fixture '%s' in %s." % \
                                (format, fixture_name, humanize(fixture_dir))


        # If any of the fixtures we loaded contain 0 objects, assume that an
        # error was encountered during fixture loading.
        if 0 in objects_per_fixture:
            sys.stderr.write(
                self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
                    (fixture_name)))
            transaction.rollback()
            transaction.leave_transaction_management()
            return

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit()
            transaction.leave_transaction_management()

        if object_count == 0:
            if verbosity > 1:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
diff --git a/webapp/django/core/management/commands/makemessages.py b/webapp/django/core/management/commands/makemessages.py
new file mode 100644
index 0000000000..aaa99eed9f
--- /dev/null
+++ b/webapp/django/core/management/commands/makemessages.py
@@ -0,0 +1,205 @@
+import re
+import os
+import sys
+from itertools import dropwhile
+from optparse import make_option
+from django.core.management.base import CommandError, BaseCommand
+
+try:
+ set
+except NameError:
+ from sets import Set as set # For Python 2.3
+
+pythonize_re = re.compile(r'\n\s*//')
+
def handle_extensions(extensions=('html',)):
    """
    Organizes multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times.

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension set: set(['.js', '.txt', '.xhtml'])

    Returns a set of dot-prefixed extensions, with '.py' filtered out.

    >>> sorted(handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py']))
    ['.html', '.js']
    >>> sorted(handle_extensions(['.html, txt,.tpl']))
    ['.html', '.tpl', '.txt']
    """
    ext_list = []
    for ext in extensions:
        # Each argument may itself be a comma-separated list; spaces are
        # stripped so '-e "js, txt"' works too.
        ext_list.extend(ext.replace(' ', '').split(','))
    for i, ext in enumerate(ext_list):
        # Normalize each extension to start with a dot.
        if not ext.startswith('.'):
            ext_list[i] = '.%s' % ext_list[i]

    # we don't want *.py files here because of the way non-*.py files
    # are handled in make_messages() (they are copied to file.ext.py files to
    # trick xgettext to parse them as Python files)
    return set([x for x in ext_list if x != '.py'])
+
def make_messages(locale=None, domain='django', verbosity='1', all=False, extensions=None):
    """
    Uses the locale directory from the Django SVN tree or an application/
    project to process all translatable strings into .pot/.po files.

    locale: single locale to process (e.g. 'pt_BR'); mutually usable with
    all, which processes every locale directory found. domain is 'django'
    (Python/templates) or 'djangojs' (JavaScript). extensions is the set of
    template file extensions to examine (see handle_extensions()).

    Requires the GNU gettext tools (xgettext, msguniq, msgmerge) on PATH.
    """
    # Need to ensure that the i18n framework is enabled
    from django.conf import settings
    if settings.configured:
        settings.USE_I18N = True
    else:
        settings.configure(USE_I18N = True)

    from django.utils.translation import templatize

    # Pick the locale directory: 'conf/locale' inside the Django tree,
    # plain 'locale' for a project or application tree.
    if os.path.isdir(os.path.join('conf', 'locale')):
        localedir = os.path.abspath(os.path.join('conf', 'locale'))
    elif os.path.isdir('locale'):
        localedir = os.path.abspath('locale')
    else:
        raise CommandError("This script should be run from the Django SVN tree or your project or app tree. If you did indeed run it from the SVN checkout or your project or application, maybe you are just missing the conf/locale (in the django tree) or locale (for project and application) directory? It is not created automatically, you have to create it by hand if you want to enable i18n for your project or application.")

    if domain not in ('django', 'djangojs'):
        raise CommandError("currently makemessages only supports domains 'django' and 'djangojs'")

    if (locale is None and not all) or domain is None:
        # backwards compatible error message
        if not sys.argv[0].endswith("make-messages.py"):
            message = "Type '%s help %s' for usage.\n" % (os.path.basename(sys.argv[0]), sys.argv[1])
        else:
            message = "usage: make-messages.py -l <language>\n   or: make-messages.py -a\n"
        raise CommandError(message)

    languages = []
    if locale is not None:
        languages.append(locale)
    elif all:
        # Every existing locale directory; skip hidden entries like '.svn'.
        languages = [el for el in os.listdir(localedir) if not el.startswith('.')]

    for locale in languages:
        if verbosity > 0:
            print "processing language", locale
        basedir = os.path.join(localedir, locale, 'LC_MESSAGES')
        if not os.path.isdir(basedir):
            os.makedirs(basedir)

        pofile = os.path.join(basedir, '%s.po' % domain)
        potfile = os.path.join(basedir, '%s.pot' % domain)

        # Always start from a fresh .pot file.
        if os.path.exists(potfile):
            os.unlink(potfile)

        # Walk the whole tree in sorted order so output is deterministic.
        all_files = []
        for (dirpath, dirnames, filenames) in os.walk("."):
            all_files.extend([(dirpath, f) for f in filenames])
        all_files.sort()
        for dirpath, file in all_files:
            file_base, file_ext = os.path.splitext(file)
            if domain == 'djangojs' and file_ext == '.js':
                if verbosity > 1:
                    sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
                # Rewrite '//' JS comments to '#' and feed the result to
                # xgettext as Perl, which tolerates the JS syntax.
                src = open(os.path.join(dirpath, file), "rb").read()
                src = pythonize_re.sub('\n#', src)
                open(os.path.join(dirpath, '%s.py' % file), "wb").write(src)
                thefile = '%s.py' % file
                cmd = 'xgettext -d %s -L Perl --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy:1,2 --from-code UTF-8 -o - "%s"' % (domain, os.path.join(dirpath, thefile))
                (stdin, stdout, stderr) = os.popen3(cmd, 't')
                msgs = stdout.read()
                errors = stderr.read()
                if errors:
                    raise CommandError("errors happened while running xgettext on %s\n%s" % (file, errors))
                # Point the '#:' location comments at the real file, not the
                # temporary *.py copy.
                old = '#: '+os.path.join(dirpath, thefile)[2:]
                new = '#: '+os.path.join(dirpath, file)[2:]
                msgs = msgs.replace(old, new)
                if os.path.exists(potfile):
                    # Strip the header
                    msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
                else:
                    msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
                if msgs:
                    open(potfile, 'ab').write(msgs)
                os.unlink(os.path.join(dirpath, thefile))
            elif domain == 'django' and (file_ext == '.py' or file_ext in extensions):
                thefile = file
                if file_ext in extensions:
                    # Convert the template to Python-ish source so xgettext
                    # can extract its translatable strings.
                    src = open(os.path.join(dirpath, file), "rb").read()
                    thefile = '%s.py' % file
                    open(os.path.join(dirpath, thefile), "wb").write(templatize(src))
                if verbosity > 1:
                    sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
                cmd = 'xgettext -d %s -L Python --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy:1,2 --keyword=ugettext_noop --keyword=ugettext_lazy --keyword=ungettext_lazy:1,2 --from-code UTF-8 -o - "%s"' % (
                    domain, os.path.join(dirpath, thefile))
                (stdin, stdout, stderr) = os.popen3(cmd, 't')
                msgs = stdout.read()
                errors = stderr.read()
                if errors:
                    raise CommandError("errors happened while running xgettext on %s\n%s" % (file, errors))
                if thefile != file:
                    # Fix up location comments for templatized files.
                    old = '#: '+os.path.join(dirpath, thefile)[2:]
                    new = '#: '+os.path.join(dirpath, file)[2:]
                    msgs = msgs.replace(old, new)
                if os.path.exists(potfile):
                    # Strip the header
                    msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
                else:
                    msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
                if msgs:
                    open(potfile, 'ab').write(msgs)
                if thefile != file:
                    os.unlink(os.path.join(dirpath, thefile))

        if os.path.exists(potfile):
            # De-duplicate messages, merge into any existing .po file, then
            # remove the intermediate .pot file.
            (stdin, stdout, stderr) = os.popen3('msguniq --to-code=utf-8 "%s"' % potfile, 'b')
            msgs = stdout.read()
            errors = stderr.read()
            if errors:
                raise CommandError("errors happened while running msguniq\n%s" % errors)
            open(potfile, 'w').write(msgs)
            if os.path.exists(pofile):
                (stdin, stdout, stderr) = os.popen3('msgmerge -q "%s" "%s"' % (pofile, potfile), 'b')
                msgs = stdout.read()
                errors = stderr.read()
                if errors:
                    raise CommandError("errors happened while running msgmerge\n%s" % errors)
            open(pofile, 'wb').write(msgs)
            os.unlink(potfile)
+
+
class Command(BaseCommand):
    """Front-end command that validates options and calls make_messages()."""

    option_list = BaseCommand.option_list + (
        make_option('--locale', '-l', default=None, dest='locale',
            help='Creates or updates the message files only for the given locale (e.g. pt_BR).'),
        make_option('--domain', '-d', default='django', dest='domain',
            help='The domain of the message files (default: "django").'),
        make_option('--verbosity', '-v', action='store', dest='verbosity',
            default='1', type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        make_option('--all', '-a', action='store_true', dest='all',
            default=False, help='Reexamines all source code and templates for new translation strings and updates all message files for all available languages.'),
        make_option('--extension', '-e', dest='extensions',
            help='The file extension(s) to examine (default: ".html", separate multiple extensions with commas, or use -e multiple times)',
            action='append'),
    )
    help = "Runs over the entire source tree of the current directory and pulls out all strings marked for translation. It creates (or updates) a message file in the conf/locale (in the django tree) or locale (for project and application) directory."

    requires_model_validation = False
    can_import_settings = False

    def handle(self, *args, **options):
        # This command is options-only; positional arguments are an error.
        if args:
            raise CommandError("Command doesn't accept any arguments")

        chosen_locale = options.get('locale')
        chosen_domain = options.get('domain')
        verbosity = int(options.get('verbosity'))
        process_all = options.get('all')

        # The djangojs domain always looks at *.js files and ignores -e.
        if chosen_domain == 'djangojs':
            exts = []
        else:
            exts = handle_extensions(options.get('extensions') or ['html'])

        if '.js' in exts:
            raise CommandError("JavaScript files should be examined by using the special 'djangojs' domain only.")

        make_messages(chosen_locale, chosen_domain, verbosity, process_all, exts)
diff --git a/webapp/django/core/management/commands/reset.py b/webapp/django/core/management/commands/reset.py
new file mode 100644
index 0000000000..3e7ca9f689
--- /dev/null
+++ b/webapp/django/core/management/commands/reset.py
@@ -0,0 +1,52 @@
+from django.core.management.base import AppCommand, CommandError
+from django.core.management.color import no_style
+from optparse import make_option
+
class Command(AppCommand):
    # Drops and recreates the tables of the given app(s) via ``sqlreset``.
    option_list = AppCommand.option_list + (
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
    )
    help = "Executes ``sqlreset`` for the given app(s) in the current database."
    args = '[appname ...]'

    output_transaction = True

    def handle_app(self, app, **options):
        """
        Run the reset SQL for a single app, asking the user to confirm
        first unless --noinput was given.
        """
        from django.db import connection, transaction
        from django.conf import settings
        from django.core.management.sql import sql_reset

        # 'app' is the app's models module (e.g. 'proj.myapp.models'),
        # so the app label is the second-to-last dotted component.
        app_name = app.__name__.split('.')[-2]

        self.style = no_style()

        sql_list = sql_reset(app, self.style)

        if options.get('interactive'):
            confirm = raw_input("""
You have requested a database reset.
This will IRREVERSIBLY DESTROY any data for
the "%s" application in the database "%s".
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (app_name, settings.DATABASE_NAME))
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
            except Exception, e:
                # Undo any partially-applied statements before reporting.
                transaction.rollback_unless_managed()
                raise CommandError("""Error: %s couldn't be reset. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
The full error: %s""" % (app_name, app_name, e))
            transaction.commit_unless_managed()
        else:
            print "Reset cancelled."
diff --git a/webapp/django/core/management/commands/runfcgi.py b/webapp/django/core/management/commands/runfcgi.py
new file mode 100644
index 0000000000..a60d4ebc59
--- /dev/null
+++ b/webapp/django/core/management/commands/runfcgi.py
@@ -0,0 +1,20 @@
+from django.core.management.base import BaseCommand
+
class Command(BaseCommand):
    """Run the current project as a FastCGI server (needs the flup package)."""

    help = "Runs this project as a FastCGI application. Requires flup."
    args = '[various KEY=val options, use `runfcgi help` for help]'

    def handle(self, *args, **options):
        from django.conf import settings
        from django.utils import translation

        # The FastCGI request loop never activates a language itself, so
        # switch to the configured one up front; a missing LANGUAGE_CODE
        # setting is tolerated.
        try:
            translation.activate(settings.LANGUAGE_CODE)
        except AttributeError:
            pass

        from django.core.servers.fastcgi import runfastcgi
        runfastcgi(args)

    def usage(self, subcommand):
        # The FastCGI server ships its own detailed help text; use it
        # verbatim instead of the generic BaseCommand usage string.
        from django.core.servers.fastcgi import FASTCGI_HELP
        return FASTCGI_HELP
diff --git a/webapp/django/core/management/commands/runserver.py b/webapp/django/core/management/commands/runserver.py
new file mode 100644
index 0000000000..12808bcaad
--- /dev/null
+++ b/webapp/django/core/management/commands/runserver.py
@@ -0,0 +1,77 @@
+from django.core.management.base import BaseCommand, CommandError
+from optparse import make_option
+import os
+import sys
+
class Command(BaseCommand):
    # Runs the pure-Python development web server, optionally under the
    # auto-reloader.
    option_list = BaseCommand.option_list + (
        make_option('--noreload', action='store_false', dest='use_reloader', default=True,
            help='Tells Django to NOT use the auto-reloader.'),
        make_option('--adminmedia', dest='admin_media_path', default='',
            help='Specifies the directory from which to serve admin media.'),
    )
    help = "Starts a lightweight Web server for development."
    args = '[optional port number, or ipaddr:port]'

    # Validation is called explicitly each time the server is reloaded.
    requires_model_validation = False

    def handle(self, addrport='', *args, **options):
        """
        Parse the optional 'addr:port' argument (default 127.0.0.1:8000)
        and start the development server, via the auto-reloader unless
        --noreload was given.
        """
        import django
        from django.core.servers.basehttp import run, AdminMediaHandler, WSGIServerException
        from django.core.handlers.wsgi import WSGIHandler
        if args:
            raise CommandError('Usage is runserver %s' % self.args)
        if not addrport:
            addr = ''
            port = '8000'
        else:
            try:
                addr, port = addrport.split(':')
            except ValueError:
                # No colon: the whole argument is a port number.
                addr, port = '', addrport
        if not addr:
            addr = '127.0.0.1'

        if not port.isdigit():
            raise CommandError("%r is not a valid port number." % port)

        use_reloader = options.get('use_reloader', True)
        admin_media_path = options.get('admin_media_path', '')
        # shutdown_message is a stealth option used by callers (e.g. tests).
        shutdown_message = options.get('shutdown_message', '')
        quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'

        def inner_run():
            # Closure over addr/port/handler config; run either directly or
            # repeatedly by the auto-reloader.
            from django.conf import settings
            print "Validating models..."
            self.validate(display_num_errors=True)
            print "\nDjango version %s, using settings %r" % (django.get_version(), settings.SETTINGS_MODULE)
            print "Development server is running at http://%s:%s/" % (addr, port)
            print "Quit the server with %s." % quit_command
            try:
                # Admin media comes from --adminmedia or Django's own tree.
                path = admin_media_path or django.__path__[0] + '/contrib/admin/media'
                handler = AdminMediaHandler(WSGIHandler(), path)
                run(addr, int(port), handler)
            except WSGIServerException, e:
                # Use helpful error messages instead of ugly tracebacks.
                ERRORS = {
                    13: "You don't have permission to access that port.",
                    98: "That port is already in use.",
                    99: "That IP address can't be assigned-to.",
                }
                try:
                    error_text = ERRORS[e.args[0].args[0]]
                except (AttributeError, KeyError):
                    error_text = str(e)
                sys.stderr.write(self.style.ERROR("Error: %s" % error_text) + '\n')
                # Need to use an OS exit because sys.exit doesn't work in a thread
                os._exit(1)
            except KeyboardInterrupt:
                if shutdown_message:
                    print shutdown_message
                sys.exit(0)
        if use_reloader:
            from django.utils import autoreload
            autoreload.main(inner_run)
        else:
            inner_run()
diff --git a/webapp/django/core/management/commands/shell.py b/webapp/django/core/management/commands/shell.py
new file mode 100644
index 0000000000..96169020e5
--- /dev/null
+++ b/webapp/django/core/management/commands/shell.py
@@ -0,0 +1,59 @@
+import os
+from django.core.management.base import NoArgsCommand
+from optparse import make_option
+
class Command(NoArgsCommand):
    # Opens an interactive Python interpreter with the app environment set up.
    option_list = NoArgsCommand.option_list + (
        make_option('--plain', action='store_true', dest='plain',
            help='Tells Django to use plain Python, not IPython.'),
    )
    help = "Runs a Python interactive interpreter. Tries to use IPython, if it's available."

    requires_model_validation = False

    def handle_noargs(self, **options):
        """Start IPython if available (and not --plain), else the stdlib REPL."""
        # XXX: (Temporary) workaround for ticket #1796: force early loading of all
        # models from installed apps.
        from django.db.models.loading import get_models
        loaded_models = get_models()

        use_plain = options.get('plain', False)

        try:
            if use_plain:
                # Don't bother loading IPython, because the user wants plain Python.
                raise ImportError
            import IPython
            # Explicitly pass an empty list as arguments, because otherwise IPython
            # would use sys.argv from this script.
            shell = IPython.Shell.IPShell(argv=[])
            shell.mainloop()
        except ImportError:
            # Fall back to the standard library interpreter.
            import code
            # Set up a dictionary to serve as the environment for the shell, so
            # that tab completion works on objects that are imported at runtime.
            # See ticket 5082.
            imported_objects = {}
            try: # Try activating rlcompleter, because it's handy.
                import readline
            except ImportError:
                pass
            else:
                # We don't have to wrap the following import in a 'try', because
                # we already know 'readline' was imported successfully.
                import rlcompleter
                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
                readline.parse_and_bind("tab:complete")

            # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
            # conventions and get $PYTHONSTARTUP first then import user.
            if not use_plain:
                pythonrc = os.environ.get("PYTHONSTARTUP")
                if pythonrc and os.path.isfile(pythonrc):
                    try:
                        execfile(pythonrc)
                    except NameError:
                        pass
                # This will import .pythonrc.py as a side-effect
                import user
            code.interact(local=imported_objects)
diff --git a/webapp/django/core/management/commands/sql.py b/webapp/django/core/management/commands/sql.py
new file mode 100644
index 0000000000..bc68a1e43a
--- /dev/null
+++ b/webapp/django/core/management/commands/sql.py
@@ -0,0 +1,10 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Print the CREATE TABLE SQL for each given app, one per invocation."""

    help = "Prints the CREATE TABLE SQL statements for the given app name(s)."

    output_transaction = True

    def handle_app(self, app, **options):
        # Delegate SQL generation to the shared helper, then join the
        # statements and emit UTF-8 bytes for stdout.
        from django.core.management.sql import sql_create
        statements = sql_create(app, self.style)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlall.py b/webapp/django/core/management/commands/sqlall.py
new file mode 100644
index 0000000000..5d510f179a
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlall.py
@@ -0,0 +1,10 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Print every DDL statement (tables, custom SQL, indexes) for an app."""

    help = "Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s)."

    output_transaction = True

    def handle_app(self, app, **options):
        # sql_all combines table creation, custom SQL and index statements.
        from django.core.management.sql import sql_all
        statements = sql_all(app, self.style)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlclear.py b/webapp/django/core/management/commands/sqlclear.py
new file mode 100644
index 0000000000..8550e88e28
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlclear.py
@@ -0,0 +1,10 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Print the DROP TABLE SQL that would remove an app's tables."""

    help = "Prints the DROP TABLE SQL statements for the given app name(s)."

    output_transaction = True

    def handle_app(self, app, **options):
        # Generate the drop statements, then join and encode for output.
        from django.core.management.sql import sql_delete
        statements = sql_delete(app, self.style)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlcustom.py b/webapp/django/core/management/commands/sqlcustom.py
new file mode 100644
index 0000000000..465330db7e
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlcustom.py
@@ -0,0 +1,10 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Prints the app's custom SQL (from its sql/ directory and field hooks)."""

    output_transaction = True
    help = "Prints the custom table modifying SQL statements for the given app name(s)."

    def handle_app(self, app, **options):
        # Imported lazily to avoid circular imports at module load time.
        from django.core.management.sql import sql_custom
        statements = sql_custom(app, self.style)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlflush.py b/webapp/django/core/management/commands/sqlflush.py
new file mode 100644
index 0000000000..d0f71d3875
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlflush.py
@@ -0,0 +1,10 @@
+from django.core.management.base import NoArgsCommand
+
class Command(NoArgsCommand):
    """Prints the SQL that flushes all Django-managed tables to a clean state."""

    output_transaction = True
    help = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."

    def handle_noargs(self, **options):
        # Imported lazily to avoid circular imports at module load time.
        from django.core.management.sql import sql_flush
        statements = sql_flush(self.style, only_django=True)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlindexes.py b/webapp/django/core/management/commands/sqlindexes.py
new file mode 100644
index 0000000000..9693588a89
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlindexes.py
@@ -0,0 +1,10 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Prints the CREATE INDEX SQL for every model in the given app(s)."""

    output_transaction = True
    help = "Prints the CREATE INDEX SQL statements for the given model module name(s)."

    def handle_app(self, app, **options):
        # Imported lazily to avoid circular imports at module load time.
        from django.core.management.sql import sql_indexes
        statements = sql_indexes(app, self.style)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlinitialdata.py b/webapp/django/core/management/commands/sqlinitialdata.py
new file mode 100644
index 0000000000..b9e2249c29
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlinitialdata.py
@@ -0,0 +1,7 @@
+from django.core.management.base import AppCommand, CommandError
+
class Command(AppCommand):
    """Stub kept for the old 'sqlinitialdata' name; the command is now 'sqlcustom'."""

    help = "RENAMED: see 'sqlcustom'"

    def handle(self, *apps, **options):
        # Fail loudly for any invocation, pointing users at the replacement.
        raise CommandError("This command has been renamed. Use the 'sqlcustom' command instead.")
diff --git a/webapp/django/core/management/commands/sqlreset.py b/webapp/django/core/management/commands/sqlreset.py
new file mode 100644
index 0000000000..ec40848c42
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlreset.py
@@ -0,0 +1,10 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Prints DROP TABLE SQL followed by the CREATE TABLE SQL for the app(s)."""

    output_transaction = True
    help = "Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s)."

    def handle_app(self, app, **options):
        # Imported lazily to avoid circular imports at module load time.
        from django.core.management.sql import sql_reset
        statements = sql_reset(app, self.style)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/sqlsequencereset.py b/webapp/django/core/management/commands/sqlsequencereset.py
new file mode 100644
index 0000000000..e8dad0bef6
--- /dev/null
+++ b/webapp/django/core/management/commands/sqlsequencereset.py
@@ -0,0 +1,9 @@
+from django.core.management.base import AppCommand
+
class Command(AppCommand):
    """Prints backend-specific sequence-reset SQL for the given app(s)' models."""

    output_transaction = True
    help = 'Prints the SQL statements for resetting sequences for the given app name(s).'

    def handle_app(self, app, **options):
        # Imported lazily to avoid circular imports at module load time.
        from django.db import connection, models
        statements = connection.ops.sequence_reset_sql(self.style, models.get_models(app))
        return u'\n'.join(statements).encode('utf-8')
diff --git a/webapp/django/core/management/commands/startapp.py b/webapp/django/core/management/commands/startapp.py
new file mode 100644
index 0000000000..a81c427142
--- /dev/null
+++ b/webapp/django/core/management/commands/startapp.py
@@ -0,0 +1,46 @@
+import os
+
+from django.core.management.base import copy_helper, CommandError, LabelCommand
+
class Command(LabelCommand):
    """Creates the skeleton directory structure for a new Django application."""

    help = "Creates a Django app directory structure for the given app name in the current directory."
    args = "[appname]"
    label = 'application name'

    requires_model_validation = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = False

    def handle_label(self, app_name, directory=None, **options):
        """Create the app named app_name inside directory (cwd by default)."""
        directory = os.getcwd() if directory is None else directory

        # The project name is the basename of the target directory, which is
        # the full path of the project directory (or the current directory
        # if no directory was passed).
        project_name = os.path.basename(directory)
        if app_name == project_name:
            raise CommandError("You cannot create an app with the same name"
                               " (%r) as your project." % app_name)

        # Refuse app names that shadow an importable Python module.
        try:
            __import__(app_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing Python module and cannot be used as an app name. Please try another name." % app_name)

        copy_helper(self.style, 'app', app_name, directory, project_name)
+
class ProjectCommand(Command):
    """startapp variant bound to a fixed project directory (used by manage.py)."""

    help = ("Creates a Django app directory structure for the given app name"
            " in this project's directory.")

    def __init__(self, project_directory):
        # Remember where the project lives; handle_label always targets it.
        super(ProjectCommand, self).__init__()
        self.project_directory = project_directory

    def handle_label(self, app_name, **options):
        super(ProjectCommand, self).handle_label(app_name,
                                                 directory=self.project_directory,
                                                 **options)
diff --git a/webapp/django/core/management/commands/startproject.py b/webapp/django/core/management/commands/startproject.py
new file mode 100644
index 0000000000..540a64d2ea
--- /dev/null
+++ b/webapp/django/core/management/commands/startproject.py
@@ -0,0 +1,38 @@
+from django.core.management.base import copy_helper, CommandError, LabelCommand
+import os
+import re
+from random import choice
+
class Command(LabelCommand):
    """Creates the skeleton directory structure for a new Django project."""

    help = "Creates a Django project directory structure for the given project name in the current directory."
    args = "[projectname]"
    label = 'project name'

    requires_model_validation = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = False

    def handle_label(self, project_name, **options):
        """Create the project in the current working directory and seed
        its settings.py with a random SECRET_KEY."""
        directory = os.getcwd()

        # Check that the project_name cannot be imported: a clash with an
        # existing module would make the new project itself unimportable.
        try:
            __import__(project_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name." % project_name)

        copy_helper(self.style, 'project', project_name, directory)

        # Create a random SECRET_KEY hash, and put it in the main settings.
        # NOTE(review): random.choice is not a cryptographic source; kept as-is
        # for compatibility with the Python versions this code supports.
        main_settings_file = os.path.join(directory, project_name, 'settings.py')
        fp = open(main_settings_file, 'r')
        try:
            settings_contents = fp.read()
        finally:
            # Previously this handle was opened inline and never closed.
            fp.close()
        secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
        # Insert the key between the quotes of the empty SECRET_KEY = '' line.
        settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
        fp = open(main_settings_file, 'w')
        try:
            fp.write(settings_contents)
        finally:
            fp.close()
diff --git a/webapp/django/core/management/commands/syncdb.py b/webapp/django/core/management/commands/syncdb.py
new file mode 100644
index 0000000000..7aeed4971e
--- /dev/null
+++ b/webapp/django/core/management/commands/syncdb.py
@@ -0,0 +1,152 @@
+from django.core.management.base import NoArgsCommand
+from django.core.management.color import no_style
+from optparse import make_option
+import sys
+
+try:
+ set
+except NameError:
+ from sets import Set as set # Python 2.3 fallback
+
class Command(NoArgsCommand):
    """syncdb: creates the database tables for every model in INSTALLED_APPS
    whose table does not exist yet, then installs custom SQL, indexes and
    the 'initial_data' fixture for the newly created models only."""
    option_list = NoArgsCommand.option_list + (
        make_option('--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
    )
    help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."

    def handle_noargs(self, **options):
        # Imported here (not at module level) so settings are configured first.
        from django.db import connection, transaction, models
        from django.conf import settings
        from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal

        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive')
        # 'traceback' is only present when forwarded by a caller; defaults off.
        show_traceback = options.get('traceback', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events (e.g. post_syncdb signal handlers).
        for app_name in settings.INSTALLED_APPS:
            try:
                __import__(app_name + '.management', {}, {}, [''])
            except ImportError, exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise

        cursor = connection.cursor()

        # Get a list of already installed *models* so that references work right.
        tables = connection.introspection.table_names()
        seen_models = connection.introspection.installed_models(tables)
        created_models = set()
        # Maps a referenced model -> list of (model, field) pairs that still
        # need ALTER/REFERENCES SQL once the referenced table exists.
        pending_references = {}

        # Create the tables for each model
        for app in models.get_apps():
            # App label is the second-to-last component of the models module path.
            app_name = app.__name__.split('.')[-2]
            model_list = models.get_models(app)
            for model in model_list:
                # Create the model's database table, if it doesn't already exist.
                if verbosity >= 2:
                    print "Processing %s.%s model" % (app_name, model._meta.object_name)
                if connection.introspection.table_name_converter(model._meta.db_table) in tables:
                    continue
                sql, references = connection.creation.sql_create_model(model, self.style, seen_models)
                seen_models.add(model)
                created_models.add(model)
                for refto, refs in references.items():
                    pending_references.setdefault(refto, []).extend(refs)
                    if refto in seen_models:
                        sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references))
                # Flush any references other tables were waiting on for this model.
                sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references))
                if verbosity >= 1:
                    print "Creating table %s" % model._meta.db_table
                for statement in sql:
                    cursor.execute(statement)
                # Record the new table so later models see it as existing.
                tables.append(connection.introspection.table_name_converter(model._meta.db_table))

        # Create the m2m tables. This must be done after all tables have been created
        # to ensure that all referred tables will exist.
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            model_list = models.get_models(app)
            for model in model_list:
                if model in created_models:
                    sql = connection.creation.sql_for_many_to_many(model, self.style)
                    if sql:
                        if verbosity >= 2:
                            print "Creating many-to-many tables for %s.%s model" % (app_name, model._meta.object_name)
                        for statement in sql:
                            cursor.execute(statement)

        transaction.commit_unless_managed()

        # Send the post_syncdb signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_sync_signal(created_models, verbosity, interactive)

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()

        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            for model in models.get_models(app):
                if model in created_models:
                    custom_sql = custom_sql_for_model(model, self.style)
                    if custom_sql:
                        if verbosity >= 1:
                            print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
                        try:
                            for sql in custom_sql:
                                cursor.execute(sql)
                        except Exception, e:
                            # Custom SQL failures are reported but non-fatal:
                            # roll back this model's statements and continue.
                            sys.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % \
                                                (app_name, model._meta.object_name, e))
                            if show_traceback:
                                import traceback
                                traceback.print_exc()
                            transaction.rollback_unless_managed()
                        else:
                            transaction.commit_unless_managed()
                    else:
                        if verbosity >= 2:
                            print "No custom SQL for %s.%s model" % (app_name, model._meta.object_name)
        # Install SQL indicies for all newly created models
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            for model in models.get_models(app):
                if model in created_models:
                    index_sql = connection.creation.sql_indexes_for_model(model, self.style)
                    if index_sql:
                        if verbosity >= 1:
                            print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
                        try:
                            for sql in index_sql:
                                cursor.execute(sql)
                        except Exception, e:
                            # Index failures are likewise non-fatal.
                            sys.stderr.write("Failed to install index for %s.%s model: %s\n" % \
                                                (app_name, model._meta.object_name, e))
                            transaction.rollback_unless_managed()
                        else:
                            transaction.commit_unless_managed()

        # Install the 'initial_data' fixture, using format discovery
        from django.core.management import call_command
        call_command('loaddata', 'initial_data', verbosity=verbosity)
diff --git a/webapp/django/core/management/commands/test.py b/webapp/django/core/management/commands/test.py
new file mode 100644
index 0000000000..ef7b197b00
--- /dev/null
+++ b/webapp/django/core/management/commands/test.py
@@ -0,0 +1,35 @@
+from django.core.management.base import BaseCommand
+from optparse import make_option
+import sys
+
class Command(BaseCommand):
    """Runs the test suite via the runner named by settings.TEST_RUNNER."""

    option_list = BaseCommand.option_list + (
        make_option('--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
    )
    help = 'Runs the test suite for the specified applications, or the entire site if no apps are specified.'
    args = '[appname ...]'

    requires_model_validation = False

    def handle(self, *test_labels, **options):
        """Resolve TEST_RUNNER to a callable, run it over test_labels, and
        exit with the failure count if any tests failed."""
        from django.conf import settings

        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive', True)

        # TEST_RUNNER is a dotted path, e.g. 'django.test.simple.run_tests':
        # everything up to the last dot is the module, the last part the callable.
        test_path = settings.TEST_RUNNER.split('.')
        # Allow for Python 2.5 relative paths
        if len(test_path) > 1:
            test_module_name = '.'.join(test_path[:-1])
        else:
            test_module_name = '.'
        # The fromlist argument must be a sequence; the old code passed the
        # bare string, which only worked because CPython iterates it.
        test_module = __import__(test_module_name, {}, {}, [test_path[-1]])
        test_runner = getattr(test_module, test_path[-1])

        failures = test_runner(test_labels, verbosity=verbosity, interactive=interactive)
        if failures:
            sys.exit(failures)
diff --git a/webapp/django/core/management/commands/testserver.py b/webapp/django/core/management/commands/testserver.py
new file mode 100644
index 0000000000..78983e73d6
--- /dev/null
+++ b/webapp/django/core/management/commands/testserver.py
@@ -0,0 +1,36 @@
+from django.core.management.base import BaseCommand
+
+from optparse import make_option
+
class Command(BaseCommand):
    """Runs a development server backed by a freshly created test database
    that has been pre-loaded with the given fixtures."""

    option_list = BaseCommand.option_list + (
        make_option('--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        make_option('--addrport', action='store', dest='addrport',
            type='string', default='',
            help='port number or ipaddr:port to run the server on'),
    )
    help = 'Runs a development server with data from the given fixture(s).'
    args = '[fixture ...]'

    requires_model_validation = False

    def handle(self, *fixture_labels, **options):
        from django.core.management import call_command
        from django.db import connection

        addrport = options.get('addrport')
        verbosity = int(options.get('verbosity', 1))

        # Create a test database.
        db_name = connection.creation.create_test_db(verbosity=verbosity)

        # Import the fixture data into the test database.
        call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})

        # Run the development server. Turn off auto-reloading because it causes
        # a strange error -- it causes this handle() method to be called
        # multiple times.
        shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
        server_options = {
            'addrport': addrport,
            'shutdown_message': shutdown_message,
            'use_reloader': False,
        }
        call_command('runserver', **server_options)
diff --git a/webapp/django/core/management/commands/validate.py b/webapp/django/core/management/commands/validate.py
new file mode 100644
index 0000000000..760d41c5bf
--- /dev/null
+++ b/webapp/django/core/management/commands/validate.py
@@ -0,0 +1,9 @@
+from django.core.management.base import NoArgsCommand
+
class Command(NoArgsCommand):
    """Runs model validation over all installed apps and prints any errors."""

    help = "Validates all installed models."

    # Validation is the whole job of this command, so skip the automatic
    # pre-run validation pass.
    requires_model_validation = False

    def handle_noargs(self, **options):
        self.validate(display_num_errors=True)
diff --git a/webapp/django/core/management/sql.py b/webapp/django/core/management/sql.py
new file mode 100644
index 0000000000..4874a49dde
--- /dev/null
+++ b/webapp/django/core/management/sql.py
@@ -0,0 +1,205 @@
+from django.core.management.base import CommandError
+import os
+import re
+
+try:
+ set
+except NameError:
+ from sets import Set as set # Python 2.3 fallback
+
def sql_create(app, style):
    """Returns a list of the CREATE TABLE SQL statements for the given app.

    References between tables are emitted inline when the referenced table
    is already known, and queued in pending_references otherwise; anything
    still pending at the end (tables from other, uninstalled apps) is
    appended as commented-out SQL.
    """
    from django.db import connection, models
    from django.conf import settings

    if settings.DATABASE_ENGINE == 'dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set DATABASE_ENGINE.
        raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the DATABASE_ENGINE setting.\n" +
            "Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.")

    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
    # Maps a referenced model -> list of (model, field) pairs whose REFERENCES
    # SQL must wait until the referenced table has been created.
    pending_references = {}

    for model in app_models:
        output, references = connection.creation.sql_create_model(model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
        final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)

    # Create the many-to-many join tables.
    for model in app_models:
        final_output.extend(connection.creation.sql_for_many_to_many(model, style))

    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            alter_sql.extend(['-- ' + sql for sql in
                connection.creation.sql_for_pending_references(model, style, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)

    return final_output
+
def sql_delete(app, style):
    """Returns a list of the DROP TABLE SQL statements for the given app.

    The returned list is reversed so that, read in order, referencing
    tables are dropped before the tables they reference.
    """
    from django.db import connection, models

    # This should work even if a connection isn't available.  The bare
    # except is deliberate best-effort: any failure simply means we
    # proceed without introspecting the live database.
    try:
        cursor = connection.cursor()
    except:
        cursor = None

    # Figure out which tables already exist
    if cursor:
        table_names = connection.introspection.get_table_list(cursor)
    else:
        table_names = []

    output = []

    # Output DROP TABLE statements for standard application tables.
    to_delete = set()

    # Maps a referenced model -> list of (model, field) pairs whose
    # constraints must be dropped along with the referenced table.
    references_to_delete = {}
    app_models = models.get_models(app)
    for model in app_models:
        if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.local_fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append( (model, f) )

            to_delete.add(model)

    for model in app_models:
        if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))

    # Output DROP TABLE statements for many-to-many tables.
    for model in app_models:
        opts = model._meta
        for f in opts.local_many_to_many:
            if cursor and connection.introspection.table_name_converter(f.m2m_db_table()) in table_names:
                output.extend(connection.creation.sql_destroy_many_to_many(model, f, style))

    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()

    return output[::-1] # Reverse it, to deal with table dependencies.
+
def sql_reset(app, style):
    """Return the DROP TABLE SQL followed by the CREATE TABLE SQL for the app."""
    drop_statements = sql_delete(app, style)
    create_statements = sql_all(app, style)
    return drop_statements + create_statements
+
def sql_flush(style, only_django=False):
    """
    Return the list of SQL statements used to flush the database.

    When only_django is True, only tables that have associated Django
    models and appear in INSTALLED_APPS are considered.
    """
    from django.db import connection
    if only_django:
        tables = connection.introspection.django_table_names()
    else:
        tables = connection.introspection.table_names()
    sequences = connection.introspection.sequence_list()
    return connection.ops.sql_flush(style, tables, sequences)
+
def sql_custom(app, style):
    """Returns a list of the custom table modifying SQL statements for the
    given app, by concatenating custom_sql_for_model() over its models.
    """
    from django.db.models import get_models

    # The per-model sql/ directory path is computed inside
    # custom_sql_for_model; the duplicate (and unused) app_dir
    # computation that used to live here has been removed.
    output = []
    for model in get_models(app):
        output.extend(custom_sql_for_model(model, style))

    return output
+
def sql_indexes(app, style):
    """Return the CREATE INDEX SQL statements for all models in the given app."""
    from django.db import connection, models
    statements = []
    for model in models.get_models(app):
        statements.extend(connection.creation.sql_indexes_for_model(model, style))
    return statements
+
def sql_all(app, style):
    """Return CREATE TABLE SQL, then custom SQL, then CREATE INDEX SQL for the app."""
    statements = sql_create(app, style)
    statements = statements + sql_custom(app, style)
    statements = statements + sql_indexes(app, style)
    return statements
+
+def custom_sql_for_model(model, style):
+ from django.db import models
+ from django.conf import settings
+
+ opts = model._meta
+ app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
+ output = []
+
+ # Post-creation SQL should come before any initial SQL data is loaded.
+ # However, this should not be done for fields that are part of a a parent
+ # model (via model inheritance).
+ nm = opts.init_name_map()
+ post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
+ for f in post_sql_fields:
+ output.extend(f.post_create_sql(style, model._meta.db_table))
+
+ # Some backends can't execute more than one SQL statement at a time,
+ # so split into separate statements.
+ statements = re.compile(r";[ \t]*$", re.M)
+
+ # Find custom SQL, if it's available.
+ sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), settings.DATABASE_ENGINE)),
+ os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
+ for sql_file in sql_files:
+ if os.path.exists(sql_file):
+ fp = open(sql_file, 'U')
+ for statement in statements.split(fp.read().decode(settings.FILE_CHARSET)):
+ # Remove any comments from the file
+ statement = re.sub(ur"--.*([\n\Z]|$)", "", statement)
+ if statement.strip():
+ output.append(statement + u";")
+ fp.close()
+
+ return output
+
+
+def emit_post_sync_signal(created_models, verbosity, interactive):
+ from django.db import models
+ from django.dispatch import dispatcher
+ # Emit the post_sync signal for every application.
+ for app in models.get_apps():
+ app_name = app.__name__.split('.')[-2]
+ if verbosity >= 2:
+ print "Running post-sync handlers for application", app_name
+ models.signals.post_syncdb.send(sender=app, app=app,
+ created_models=created_models, verbosity=verbosity,
+ interactive=interactive)
diff --git a/webapp/django/core/management/validation.py b/webapp/django/core/management/validation.py
new file mode 100644
index 0000000000..0b3542c66d
--- /dev/null
+++ b/webapp/django/core/management/validation.py
@@ -0,0 +1,221 @@
+import sys
+from django.core.management.color import color_style
+from django.utils.itercompat import is_iterable
+
class ModelErrorCollection:
    """Accumulates (context, error) pairs and echoes each one to outfile
    in the configured error style as it arrives."""

    def __init__(self, outfile=sys.stdout):
        self.errors = []
        self.outfile = outfile
        self.style = color_style()

    def add(self, context, error):
        # Record the error, then report it immediately.
        message = "%s: %s\n" % (context, error)
        self.errors.append((context, error))
        self.outfile.write(self.style.ERROR(message))
+
+def get_validation_errors(outfile, app=None):
+ """
+ Validates all models that are part of the specified app. If no app name is provided,
+ validates all models of all installed apps. Writes errors, if any, to outfile.
+ Returns number of errors.
+ """
+ from django.conf import settings
+ from django.db import models, connection
+ from django.db.models.loading import get_app_errors
+ from django.db.models.fields.related import RelatedObject
+
+ e = ModelErrorCollection(outfile)
+
+ for (app_name, error) in get_app_errors().items():
+ e.add(app_name, error)
+
+ for cls in models.get_models(app):
+ opts = cls._meta
+
+ # Do field-specific validation.
+ for f in opts.local_fields:
+ if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
+ e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
+ if f.name.endswith('_'):
+ e.add(opts, '"%s": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.' % f.name)
+ if isinstance(f, models.CharField) and f.max_length in (None, 0):
+ e.add(opts, '"%s": CharFields require a "max_length" attribute.' % f.name)
+ if isinstance(f, models.DecimalField):
+ if f.decimal_places is None:
+ e.add(opts, '"%s": DecimalFields require a "decimal_places" attribute.' % f.name)
+ if f.max_digits is None:
+ e.add(opts, '"%s": DecimalFields require a "max_digits" attribute.' % f.name)
+ if isinstance(f, models.FileField) and not f.upload_to:
+ e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
+ if isinstance(f, models.ImageField):
+ try:
+ from PIL import Image
+ except ImportError:
+ e.add(opts, '"%s": To use ImageFields, you need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/ .' % f.name)
+ if f.choices:
+ if isinstance(f.choices, basestring) or not is_iterable(f.choices):
+ e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
+ else:
+ for c in f.choices:
+ if not type(c) in (tuple, list) or len(c) != 2:
+ e.add(opts, '"%s": "choices" should be a sequence of two-tuples.' % f.name)
+ if f.db_index not in (None, True, False):
+ e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)
+
+ # Perform any backend-specific field validation.
+ connection.validation.validate_field(e, opts, f)
+
+ # Check to see if the related field will clash with any existing
+ # fields, m2m fields, m2m related objects or related objects
+ if f.rel:
+ if f.rel.to not in models.get_models():
+ e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
+ # it is a string and we could not find the model it refers to
+ # so skip the next section
+ if isinstance(f.rel.to, (str, unicode)):
+ continue
+
+ rel_opts = f.rel.to._meta
+ rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
+ rel_query_name = f.related_query_name()
+ for r in rel_opts.fields:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.local_many_to_many:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.get_all_related_many_to_many_objects():
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ for r in rel_opts.get_all_related_objects():
+ if r.field is not f:
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+
+ seen_intermediary_signatures = []
+ for i, f in enumerate(opts.local_many_to_many):
+ # Check to see if the related m2m field will clash with any
+ # existing fields, m2m fields, m2m related objects or related
+ # objects
+ if f.rel.to not in models.get_models():
+ e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
+ # it is a string and we could not find the model it refers to
+ # so skip the next section
+ if isinstance(f.rel.to, (str, unicode)):
+ continue
+
+ # Check that the field is not set to unique. ManyToManyFields do not support unique.
+ if f.unique:
+ e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name)
+
+ if getattr(f.rel, 'through', None) is not None:
+ if hasattr(f.rel, 'through_model'):
+ from_model, to_model = cls, f.rel.to
+ if from_model == to_model and f.rel.symmetrical:
+ e.add(opts, "Many-to-many fields with intermediate tables cannot be symmetrical.")
+ seen_from, seen_to, seen_self = False, False, 0
+ for inter_field in f.rel.through_model._meta.fields:
+ rel_to = getattr(inter_field.rel, 'to', None)
+ if from_model == to_model: # relation to self
+ if rel_to == from_model:
+ seen_self += 1
+ if seen_self > 2:
+ e.add(opts, "Intermediary model %s has more than two foreign keys to %s, which is ambiguous and is not permitted." % (f.rel.through_model._meta.object_name, from_model._meta.object_name))
+ else:
+ if rel_to == from_model:
+ if seen_from:
+ e.add(opts, "Intermediary model %s has more than one foreign key to %s, which is ambiguous and is not permitted." % (f.rel.through_model._meta.object_name, rel_from._meta.object_name))
+ else:
+ seen_from = True
+ elif rel_to == to_model:
+ if seen_to:
+ e.add(opts, "Intermediary model %s has more than one foreign key to %s, which is ambiguous and is not permitted." % (f.rel.through_model._meta.object_name, rel_to._meta.object_name))
+ else:
+ seen_to = True
+ if f.rel.through_model not in models.get_models():
+ e.add(opts, "'%s' specifies an m2m relation through model %s, which has not been installed." % (f.name, f.rel.through))
+ signature = (f.rel.to, cls, f.rel.through_model)
+ if signature in seen_intermediary_signatures:
+ e.add(opts, "The model %s has two manually-defined m2m relations through the model %s, which is not permitted. Please consider using an extra field on your intermediary model instead." % (cls._meta.object_name, f.rel.through_model._meta.object_name))
+ else:
+ seen_intermediary_signatures.append(signature)
+ seen_related_fk, seen_this_fk = False, False
+ for field in f.rel.through_model._meta.fields:
+ if field.rel:
+ if not seen_related_fk and field.rel.to == f.rel.to:
+ seen_related_fk = True
+ elif field.rel.to == cls:
+ seen_this_fk = True
+ if not seen_related_fk or not seen_this_fk:
+ e.add(opts, "'%s' has a manually-defined m2m relation through model %s, which does not have foreign keys to %s and %s" % (f.name, f.rel.through, f.rel.to._meta.object_name, cls._meta.object_name))
+ else:
+ e.add(opts, "'%s' specifies an m2m relation through model %s, which has not been installed" % (f.name, f.rel.through))
+
+ rel_opts = f.rel.to._meta
+ rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
+ rel_query_name = f.related_query_name()
+ # If rel_name is none, there is no reverse accessor (this only
+ # occurs for symmetrical m2m relations to self). If this is the
+ # case, there are no clashes to check for this field, as there are
+ # no reverse descriptors for this field.
+ if rel_name is not None:
+ for r in rel_opts.fields:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.local_many_to_many:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.get_all_related_many_to_many_objects():
+ if r.field is not f:
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ for r in rel_opts.get_all_related_objects():
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+
+ # Check ordering attribute.
+ if opts.ordering:
+ for field_name in opts.ordering:
+ if field_name == '?': continue
+ if field_name.startswith('-'):
+ field_name = field_name[1:]
+ if opts.order_with_respect_to and field_name == '_order':
+ continue
+ # Skip ordering in the format field1__field2 (FIXME: checking
+ # this format would be nice, but it's a little fiddly).
+ if '_' in field_name:
+ continue
+ try:
+ opts.get_field(field_name, many_to_many=False)
+ except models.FieldDoesNotExist:
+ e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)
+
+ # Check unique_together.
+ for ut in opts.unique_together:
+ for field_name in ut:
+ try:
+ f = opts.get_field(field_name, many_to_many=True)
+ except models.FieldDoesNotExist:
+ e.add(opts, '"unique_together" refers to %s, a field that doesn\'t exist. Check your syntax.' % field_name)
+ else:
+ if isinstance(f.rel, models.ManyToManyRel):
+ e.add(opts, '"unique_together" refers to %s. ManyToManyFields are not supported in unique_together.' % f.name)
+ if f not in opts.local_fields:
+ e.add(opts, '"unique_together" refers to %s. This is not in the same model as the unique_together statement.' % f.name)
+
+ return len(e.errors)
diff --git a/webapp/django/core/paginator.py b/webapp/django/core/paginator.py
new file mode 100644
index 0000000000..495cdf2d76
--- /dev/null
+++ b/webapp/django/core/paginator.py
@@ -0,0 +1,120 @@
+from math import ceil
+
class InvalidPage(Exception):
    """Base exception for all pagination errors."""


class PageNotAnInteger(InvalidPage):
    """Raised when a page number cannot be interpreted as an integer."""


class EmptyPage(InvalidPage):
    """Raised when the requested page holds no results."""
+
class Paginator(object):
    """
    Split ``object_list`` into pages of ``per_page`` items.

    ``orphans``: if the last page would contain this many items or fewer,
    they are folded into the previous page instead.
    ``allow_empty_first_page``: whether page 1 may exist (empty) when the
    object list is empty.
    """
    def __init__(self, object_list, per_page, orphans=0, allow_empty_first_page=True):
        self.object_list = object_list
        self.per_page = per_page
        self.orphans = orphans
        self.allow_empty_first_page = allow_empty_first_page
        # Both totals are computed lazily and cached.
        self._num_pages = self._count = None

    def validate_number(self, number):
        "Validates the given 1-based page number."
        try:
            number = int(number)
        except (TypeError, ValueError):
            # BUGFIX: int() raises TypeError (not ValueError) for inputs
            # like None or a list; catching only ValueError let a raw
            # TypeError escape instead of the documented PageNotAnInteger.
            raise PageNotAnInteger('That page number is not an integer')
        if number < 1:
            raise EmptyPage('That page number is less than 1')
        if number > self.num_pages:
            # Page 1 is always valid when empty first pages are allowed.
            if number == 1 and self.allow_empty_first_page:
                pass
            else:
                raise EmptyPage('That page contains no results')
        return number

    def page(self, number):
        "Returns a Page object for the given 1-based page number."
        number = self.validate_number(number)
        bottom = (number - 1) * self.per_page
        top = bottom + self.per_page
        # Fold trailing orphans into the final page.
        if top + self.orphans >= self.count:
            top = self.count
        return Page(self.object_list[bottom:top], number, self)

    def _get_count(self):
        "Returns the total number of objects, across all pages."
        if self._count is None:
            try:
                self._count = self.object_list.count()
            except (AttributeError, TypeError):
                # AttributeError if object_list has no count() method.
                # TypeError if object_list.count() requires arguments
                # (i.e. is of type list).
                self._count = len(self.object_list)
        return self._count
    count = property(_get_count)

    def _get_num_pages(self):
        "Returns the total number of pages."
        if self._num_pages is None:
            if self.count == 0 and not self.allow_empty_first_page:
                self._num_pages = 0
            else:
                # At least one page exists; orphans reduce the item count
                # used for the page computation.
                hits = max(1, self.count - self.orphans)
                self._num_pages = int(ceil(hits / float(self.per_page)))
        return self._num_pages
    num_pages = property(_get_num_pages)

    def _get_page_range(self):
        """
        Returns a 1-based range of pages for iterating through within
        a template for loop.
        """
        return range(1, self.num_pages + 1)
    page_range = property(_get_page_range)
+
+QuerySetPaginator = Paginator # For backwards-compatibility.
+
class Page(object):
    """A single page of results, as handed out by ``Paginator.page()``."""

    def __init__(self, object_list, number, paginator):
        self.object_list = object_list
        self.number = number
        self.paginator = paginator

    def __repr__(self):
        return '<Page %s of %s>' % (self.number, self.paginator.num_pages)

    def has_next(self):
        """True when at least one page follows this one."""
        return self.number < self.paginator.num_pages

    def has_previous(self):
        """True when at least one page precedes this one."""
        return 1 < self.number

    def has_other_pages(self):
        """True when this is not the only page."""
        return self.has_previous() or self.has_next()

    def next_page_number(self):
        """1-based number of the following page (not validated)."""
        return self.number + 1

    def previous_page_number(self):
        """1-based number of the preceding page (not validated)."""
        return self.number - 1

    def start_index(self):
        """
        Returns the 1-based index of the first object on this page,
        relative to total objects in the paginator.
        """
        # An empty paginator has no first object; report 0.
        if self.paginator.count == 0:
            return 0
        return (self.paginator.per_page * (self.number - 1)) + 1

    def end_index(self):
        """
        Returns the 1-based index of the last object on this page,
        relative to total objects found (hits).
        """
        # The final page may have absorbed orphans, so its end is simply
        # the overall object count.
        if self.number == self.paginator.num_pages:
            return self.paginator.count
        return self.number * self.paginator.per_page
diff --git a/webapp/django/core/serializers/__init__.py b/webapp/django/core/serializers/__init__.py
new file mode 100644
index 0000000000..5365efeacc
--- /dev/null
+++ b/webapp/django/core/serializers/__init__.py
@@ -0,0 +1,112 @@
+"""
+Interfaces for serializing Django objects.
+
+Usage::
+
+ from django.core import serializers
+ json = serializers.serialize("json", some_query_set)
+ objects = list(serializers.deserialize("json", json))
+
+To add your own serializers, use the SERIALIZATION_MODULES setting::
+
+ SERIALIZATION_MODULES = {
+ "csv" : "path.to.csv.serializer",
+ "txt" : "path.to.txt.serializer",
+ }
+
+"""
+
+from django.conf import settings
+
# Built-in serializers: maps a format name to the dotted path of the
# module implementing it. Modules are imported lazily by _load_serializers().
BUILTIN_SERIALIZERS = {
    "xml" : "django.core.serializers.xml_serializer",
    "python" : "django.core.serializers.python",
    "json" : "django.core.serializers.json",
}

# Check for PyYaml and register the serializer if it's available.
try:
    import yaml
    BUILTIN_SERIALIZERS["yaml"] = "django.core.serializers.pyyaml"
except ImportError:
    # PyYAML is optional; without it the "yaml" format is simply unavailable.
    pass

# Global format -> module registry; populated lazily on first use.
_serializers = {}
+
def register_serializer(format, serializer_module, serializers=None):
    """Register a new serializer.

    ``serializer_module`` should be the fully qualified module name
    for the serializer.

    If ``serializers`` is provided, the registration will be added
    to the provided dictionary.

    If ``serializers`` is not provided, the registration will be made
    directly into the global register of serializers. Adding serializers
    directly is not a thread-safe operation.
    """
    # BUGFIX: the original docstring opened with four quote characters
    # (""""Register...), leaving a stray '"' at the start of __doc__.
    #
    # The non-empty fromlist makes __import__ return the leaf module of a
    # dotted path rather than the top-level package.
    module = __import__(serializer_module, {}, {}, [''])
    if serializers is None:
        _serializers[format] = module
    else:
        serializers[format] = module
+
def unregister_serializer(format):
    "Unregister a given serializer. This is not a thread-safe operation."
    del _serializers[format]

def get_serializer(format):
    # Lazily populate the registry on first access, then return the
    # Serializer class registered for the requested format.
    if not _serializers:
        _load_serializers()
    return _serializers[format].Serializer

def get_serializer_formats():
    # Names of every registered format, including internal-only ones.
    if not _serializers:
        _load_serializers()
    return _serializers.keys()

def get_public_serializer_formats():
    # Formats intended for end users: those whose Serializer class is not
    # flagged internal_use_only. (iteritems() -- Python 2 idiom.)
    if not _serializers:
        _load_serializers()
    return [k for k, v in _serializers.iteritems() if not v.Serializer.internal_use_only]

def get_deserializer(format):
    # Return the Deserializer callable registered for the requested format.
    if not _serializers:
        _load_serializers()
    return _serializers[format].Deserializer
+
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects) using
    a certain serializer.
    """
    serializer_class = get_serializer(format)
    serializer = serializer_class()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
+
def deserialize(format, stream_or_string):
    """
    Deserialize a stream or a string.

    Returns an iterator that yields ``(obj, m2m_relation_dict)``, where
    ``obj`` is an instantiated -- but *unsaved* -- object, and
    ``m2m_relation_dict`` is a dictionary of
    ``{m2m_field_name : list_of_related_objects}``.
    """
    deserializer = get_deserializer(format)
    return deserializer(stream_or_string)
+
def _load_serializers():
    """
    Register built-in and settings-defined serializers. This is done lazily so
    that user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    global _serializers
    # Build into a local dict first so the global registry is swapped in a
    # single assignment; a partially-filled registry is never visible.
    serializers = {}
    for format in BUILTIN_SERIALIZERS:
        register_serializer(format, BUILTIN_SERIALIZERS[format], serializers)
    if hasattr(settings, "SERIALIZATION_MODULES"):
        # Settings-defined serializers are registered last and so may
        # override built-in formats of the same name.
        for format in settings.SERIALIZATION_MODULES:
            register_serializer(format, settings.SERIALIZATION_MODULES[format], serializers)
    _serializers = serializers
diff --git a/webapp/django/core/serializers/base.py b/webapp/django/core/serializers/base.py
new file mode 100644
index 0000000000..bfd785a6fe
--- /dev/null
+++ b/webapp/django/core/serializers/base.py
@@ -0,0 +1,175 @@
+"""
+Module for abstract serializer/unserializer base classes.
+"""
+
+from StringIO import StringIO
+
+from django.db import models
+from django.utils.encoding import smart_str, smart_unicode
+from django.utils import datetime_safe
+
+class SerializationError(Exception):
+ """Something bad happened during serialization."""
+ pass
+
+class DeserializationError(Exception):
+ """Something bad happened during deserialization."""
+ pass
+
class Serializer(object):
    """
    Abstract serializer base class.

    Subclasses implement the start/end/handle_* hooks; serialize() drives
    the iteration over the queryset and its fields.
    """

    # Indicates if the implemented serializer is only available for
    # internal Django use.
    internal_use_only = False

    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Recognized options: "stream" (file-like output target; defaults to
        an in-memory StringIO) and "fields" (iterable of field names to
        restrict output to; None serializes every field).
        """
        self.options = options

        self.stream = options.get("stream", StringIO())
        self.selected_fields = options.get("fields")

        self.start_serialization()
        for obj in queryset:
            self.start_object(obj)
            for field in obj._meta.local_fields:
                if field.serialize:
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        # attname for relational fields presumably ends in
                        # "_id"; [:-3] strips that suffix so selection is by
                        # field name -- TODO confirm against the FK field impl.
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in obj._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()

    def get_string_value(self, obj, field):
        """
        Convert a field's value to a string.
        """
        if isinstance(field, models.DateTimeField):
            # datetime_safe wraps the value before strftime; presumably to
            # cope with datetimes plain strftime rejects -- TODO confirm
            # against django.utils.datetime_safe.
            d = datetime_safe.new_datetime(getattr(obj, field.name))
            value = d.strftime("%Y-%m-%d %H:%M:%S")
        else:
            value = field.flatten_data(follow=None, obj=obj).get(field.name, "")
        return smart_unicode(value)

    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError

    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass

    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError

    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass

    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError

    def getvalue(self):
        """
        Return the fully serialized queryset (or None if the output stream is
        not seekable).
        """
        # Streams without a getvalue() method (e.g. real files) yield None.
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
+
class Deserializer(object):
    """
    Abstract base deserializer class.

    Instances are iterators over the deserialized objects in the stream.
    """

    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Accept either raw string data or a ready-made file-like stream.
        if isinstance(stream_or_string, basestring):
            self.stream = StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
        # hack to make sure that the models have all been loaded before
        # deserialization starts (otherwise subclass calls to get_model()
        # and friends might fail...)
        models.get_apps()

    def __iter__(self):
        # The deserializer is its own iterator.
        return self

    def next(self):
        """Iteration iterface -- return the next item in the stream"""
        # Python 2 iterator protocol; subclasses must implement.
        raise NotImplementedError
+
class DeserializedObject(object):
    """
    A deserialized model.

    Basically a container for holding the pre-saved deserialized data along
    with the many-to-many data saved with the object.

    Call ``save()`` to save the object (with the many-to-many data) to the
    database; call ``save(save_m2m=False)`` to save just the object fields
    (and not touch the many-to-many stuff.)
    """

    def __init__(self, obj, m2m_data=None):
        # obj: an unsaved model instance.
        # m2m_data: {accessor_name: [related objects]} to assign once the
        # instance itself has been saved.
        self.object = obj
        self.m2m_data = m2m_data

    def __repr__(self):
        return "<DeserializedObject: %s>" % smart_str(self.object)

    def save(self, save_m2m=True):
        # Call save on the Model baseclass directly. This bypasses any
        # model-defined save. The save is also forced to be raw.
        # This ensures that the data that is deserialized is literally
        # what came from the file, not post-processed by pre_save/save
        # methods.
        models.Model.save_base(self.object, raw=True)
        if self.m2m_data and save_m2m:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)

        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None
diff --git a/webapp/django/core/serializers/json.py b/webapp/django/core/serializers/json.py
new file mode 100644
index 0000000000..97e5bc9b26
--- /dev/null
+++ b/webapp/django/core/serializers/json.py
@@ -0,0 +1,68 @@
+"""
+Serialize data to/from JSON
+"""
+
+import datetime
+from StringIO import StringIO
+
+from django.core.serializers.python import Serializer as PythonSerializer
+from django.core.serializers.python import Deserializer as PythonDeserializer
+from django.utils import datetime_safe
+from django.utils import simplejson
+
+try:
+ import decimal
+except ImportError:
+ from django.utils import _decimal as decimal # Python 2.3 fallback
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to JSON.

    Reuses the python serializer to build plain dicts, then dumps them as
    JSON in end_serialization().
    """
    internal_use_only = False

    def end_serialization(self):
        # "stream" and "fields" were consumed by the base serializer; the
        # remaining options are forwarded to simplejson.dump().
        self.options.pop('stream', None)
        self.options.pop('fields', None)
        simplejson.dump(self.objects, self.stream, cls=DjangoJSONEncoder, **self.options)

    def getvalue(self):
        # Return the buffered output when the stream supports it (e.g.
        # StringIO); file-like streams without getvalue() yield None.
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    """
    # Accept raw string data as well as a file-like stream.
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    # Parse the JSON into plain Python structures, then delegate to the
    # python deserializer to build model instances.
    for obj in PythonDeserializer(simplejson.load(stream)):
        yield obj
+
class DjangoJSONEncoder(simplejson.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time and decimal types.
    """

    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    def default(self, o):
        # NOTE: the datetime check must come before the date check --
        # datetime.datetime is a subclass of datetime.date, so swapping the
        # order would format datetimes as bare dates.
        if isinstance(o, datetime.datetime):
            d = datetime_safe.new_datetime(o)
            return d.strftime("%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT))
        elif isinstance(o, datetime.date):
            d = datetime_safe.new_date(o)
            return d.strftime(self.DATE_FORMAT)
        elif isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        elif isinstance(o, decimal.Decimal):
            # str() preserves the exact decimal digits; a float conversion
            # could lose precision.
            return str(o)
        else:
            # Anything else: defer to the base encoder (which raises
            # TypeError for unsupported types).
            return super(DjangoJSONEncoder, self).default(o)
+
+# Older, deprecated class name (for backwards compatibility purposes).
+DateTimeAwareJSONEncoder = DjangoJSONEncoder
+
diff --git a/webapp/django/core/serializers/python.py b/webapp/django/core/serializers/python.py
new file mode 100644
index 0000000000..c129c068df
--- /dev/null
+++ b/webapp/django/core/serializers/python.py
@@ -0,0 +1,108 @@
+"""
+A Python "serializer". Doesn't do much serializing per se -- just converts to
+and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
+other serializers.
+"""
+
+from django.conf import settings
+from django.core.serializers import base
+from django.db import models
+from django.utils.encoding import smart_unicode
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects.
    """

    internal_use_only = True

    def start_serialization(self):
        # _current holds the field dict of the object being serialized;
        # objects accumulates the finished per-object dicts.
        self._current = None
        self.objects = []

    def end_serialization(self):
        pass

    def start_object(self, obj):
        self._current = {}

    def end_object(self, obj):
        # Emit one {"model", "pk", "fields"} dict per object.
        self.objects.append({
            "model" : smart_unicode(obj._meta),
            "pk" : smart_unicode(obj._get_pk_val(), strings_only=True),
            "fields" : self._current
        })
        self._current = None

    def handle_field(self, obj, field):
        # strings_only=True leaves non-string primitives (None, ints,
        # bools) untouched instead of coercing them to unicode.
        self._current[field.name] = smart_unicode(getattr(obj, field.name), strings_only=True)

    def handle_fk_field(self, obj, field):
        related = getattr(obj, field.name)
        if related is not None:
            if field.rel.field_name == related._meta.pk.name:
                # Related to remote object via primary key
                related = related._get_pk_val()
            else:
                # Related to remote object via other field
                related = getattr(related, field.rel.field_name)
        self._current[field.name] = smart_unicode(related, strings_only=True)

    def handle_m2m_field(self, obj, field):
        # creates_table presumably means the m2m manages its own join table
        # (i.e. no explicit "through" model) -- TODO confirm.
        if field.creates_table:
            self._current[field.name] = [smart_unicode(related._get_pk_val(), strings_only=True)
                for related in getattr(obj, field.name).iterator()]

    def getvalue(self):
        # The "serialized" form is simply the accumulated list of dicts.
        return self.objects
+
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    # Make sure all models are loaded so model lookups below succeed.
    models.get_apps()
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}

        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            if isinstance(field_value, str):
                # Decode bytestrings using the caller-supplied encoding
                # (default: settings.DEFAULT_CHARSET).
                field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)

            field = Model._meta.get_field(field_name)

            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                # Convert each related pk through the target model's pk field.
                m2m_convert = field.rel.to._meta.pk.to_python
                m2m_data[field.name] = [m2m_convert(smart_unicode(pk)) for pk in field_value]

            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value:
                    data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None

            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)

        yield base.DeserializedObject(Model(**data), m2m_data)
+
def _get_model(model_identifier):
    """
    Helper to look up a model from an "app_label.module_name" string.

    Raises base.DeserializationError when no such model can be found.
    """
    parts = model_identifier.split(".")
    try:
        found = models.get_model(*parts)
    except TypeError:
        # Wrong number of parts in the identifier.
        found = None
    if found is None:
        raise base.DeserializationError(u"Invalid model identifier: '%s'" % model_identifier)
    return found
diff --git a/webapp/django/core/serializers/pyyaml.py b/webapp/django/core/serializers/pyyaml.py
new file mode 100644
index 0000000000..ac77166a2f
--- /dev/null
+++ b/webapp/django/core/serializers/pyyaml.py
@@ -0,0 +1,51 @@
+"""
+YAML serializer.
+
+Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
+"""
+
+from StringIO import StringIO
+import yaml
+
+from django.db import models
+from django.core.serializers.python import Serializer as PythonSerializer
+from django.core.serializers.python import Deserializer as PythonDeserializer
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """

    internal_use_only = False

    def handle_field(self, obj, field):
        # A nasty special case: base YAML doesn't support serialization of time
        # types (as opposed to dates or datetimes, which it does support). Since
        # we want to use the "safe" serializer for better interoperability, we
        # need to do something with those pesky times. Converting 'em to strings
        # isn't perfect, but it's better than a "!!python/time" type which would
        # halt deserialization under any other language.
        if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None:
            self._current[field.name] = str(getattr(obj, field.name))
        else:
            super(Serializer, self).handle_field(obj, field)

    def end_serialization(self):
        # "stream" and "fields" were consumed by the base serializer; the
        # remaining options go straight to yaml.safe_dump().
        self.options.pop('stream', None)
        self.options.pop('fields', None)
        yaml.safe_dump(self.objects, self.stream, **self.options)

    def getvalue(self):
        # Unconditional getvalue() -- assumes the stream is a StringIO-like
        # buffer (unlike the json serializer, there is no callable() guard).
        return self.stream.getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.
    """
    # Accept raw string data as well as a file-like stream.
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    # NOTE(security): yaml.load() without an explicit safe loader can
    # construct arbitrary Python objects via !!python/* tags -- never feed
    # it untrusted input (yaml.safe_load is the safe counterpart).
    for obj in PythonDeserializer(yaml.load(stream)):
        yield obj
+
diff --git a/webapp/django/core/serializers/xml_serializer.py b/webapp/django/core/serializers/xml_serializer.py
new file mode 100644
index 0000000000..04498db00c
--- /dev/null
+++ b/webapp/django/core/serializers/xml_serializer.py
@@ -0,0 +1,238 @@
+"""
+XML serializer.
+"""
+
+from django.conf import settings
+from django.core.serializers import base
+from django.db import models
+from django.utils.xmlutils import SimplerXMLGenerator
+from django.utils.encoding import smart_unicode
+from xml.dom import pulldom
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to XML.
    """

    def indent(self, level):
        # Emit pretty-printing whitespace only when an "indent" option was
        # supplied; its value is the number of spaces per nesting level.
        if self.options.get('indent', None) is not None:
            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)

    def start_serialization(self):
        """
        Start serialization -- open the XML document and the root element.
        """
        self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
        self.xml.startDocument()
        self.xml.startElement("django-objects", {"version" : "1.0"})

    def end_serialization(self):
        """
        End serialization -- end the document.
        """
        self.indent(0)
        self.xml.endElement("django-objects")
        self.xml.endDocument()

    def start_object(self, obj):
        """
        Called as each object is handled.
        """
        # Only model instances (anything with _meta) can be serialized.
        if not hasattr(obj, "_meta"):
            raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))

        self.indent(1)
        self.xml.startElement("object", {
            "pk" : smart_unicode(obj._get_pk_val()),
            "model" : smart_unicode(obj._meta),
        })

    def end_object(self, obj):
        """
        Called after handling all fields for an object.
        """
        self.indent(1)
        self.xml.endElement("object")

    def handle_field(self, obj, field):
        """
        Called to handle each field on an object (except for ForeignKeys and
        ManyToManyFields)
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "type" : field.get_internal_type()
        })

        # Get a "string version" of the object's data (this is handled by the
        # serializer base class).
        if getattr(obj, field.name) is not None:
            value = self.get_string_value(obj, field)
            self.xml.characters(smart_unicode(value))
        else:
            # NULL values are encoded as an empty <None/> child element.
            self.xml.addQuickElement("None")

        self.xml.endElement("field")

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey (we need to treat them slightly
        differently from regular fields).
        """
        self._start_relational_field(field)
        related = getattr(obj, field.name)
        if related is not None:
            if field.rel.field_name == related._meta.pk.name:
                # Related to remote object via primary key
                related = related._get_pk_val()
            else:
                # Related to remote object via other field
                related = getattr(related, field.rel.field_name)
            self.xml.characters(smart_unicode(related))
        else:
            self.xml.addQuickElement("None")
        self.xml.endElement("field")

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField. Related objects are only
        serialized as references to the object's PK (i.e. the related *data*
        is not dumped, just the relation).
        """
        # creates_table presumably excludes m2m relations with an explicit
        # intermediary model -- TODO confirm.
        if field.creates_table:
            self._start_relational_field(field)
            for relobj in getattr(obj, field.name).iterator():
                self.xml.addQuickElement("object", attrs={"pk" : smart_unicode(relobj._get_pk_val())})
            self.xml.endElement("field")

    def _start_relational_field(self, field):
        """
        Helper to output the <field> element for relational fields
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "rel" : field.rel.__class__.__name__,
            "to" : smart_unicode(field.rel.to._meta),
        })
+
class Deserializer(base.Deserializer):
    """
    Deserialize XML.

    Streams <object> elements out of the input with pulldom, expanding and
    converting one element per iteration step.
    """

    def __init__(self, stream_or_string, **options):
        super(Deserializer, self).__init__(stream_or_string, **options)
        self.event_stream = pulldom.parse(self.stream)

    def next(self):
        # Python 2 iterator protocol: advance to the next <object> element,
        # expand it into a full DOM subtree and convert it.
        for event, node in self.event_stream:
            if event == "START_ELEMENT" and node.nodeName == "object":
                self.event_stream.expandNode(node)
                return self._handle_object(node)
        raise StopIteration

    def _handle_object(self, node):
        """
        Convert an <object> node to a DeserializedObject.
        """
        # Look up the model using the model loading mechanism. If this fails,
        # bail.
        Model = self._get_model_from_node(node, "model")

        # Start building a data dictionary from the object. If the node is
        # missing the pk attribute, bail.
        pk = node.getAttribute("pk")
        if not pk:
            raise base.DeserializationError("<object> node is missing the 'pk' attribute")

        data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}

        # Also start building a dict of m2m data (this is saved as
        # {m2m_accessor_attribute : [list_of_related_objects]})
        m2m_data = {}

        # Deseralize each field.
        for field_node in node.getElementsByTagName("field"):
            # If the field is missing the name attribute, bail (are you
            # sensing a pattern here?)
            field_name = field_node.getAttribute("name")
            if not field_name:
                raise base.DeserializationError("<field> node is missing the 'name' attribute")

            # Get the field from the Model. This will raise a
            # FieldDoesNotExist if, well, the field doesn't exist, which will
            # be propagated correctly.
            field = Model._meta.get_field(field_name)

            # As is usually the case, relation fields get the special treatment.
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                data[field.attname] = self._handle_fk_field_node(field_node, field)
            else:
                # A <None/> child element marks a NULL value (mirrors the
                # serializer's encoding).
                if field_node.getElementsByTagName('None'):
                    value = None
                else:
                    value = field.to_python(getInnerText(field_node).strip())
                data[field.name] = value

        # Return a DeserializedObject so that the m2m data has a place to live.
        return base.DeserializedObject(Model(**data), m2m_data)

    def _handle_fk_field_node(self, node, field):
        """
        Handle a <field> node for a ForeignKey
        """
        # Check if there is a child node named 'None', returning None if so.
        if node.getElementsByTagName('None'):
            return None
        else:
            # Convert the text through the referenced model's target field.
            return field.rel.to._meta.get_field(field.rel.field_name).to_python(
                getInnerText(node).strip())

    def _handle_m2m_field_node(self, node, field):
        """
        Handle a <field> node for a ManyToManyField.
        """
        # Each related object is referenced only by its pk attribute.
        return [field.rel.to._meta.pk.to_python(
                    c.getAttribute("pk"))
                    for c in node.getElementsByTagName("object")]

    def _get_model_from_node(self, node, attr):
        """
        Helper to look up a model from a <object model=...> or a <field
        rel=... to=...> node.
        """
        model_identifier = node.getAttribute(attr)
        if not model_identifier:
            raise base.DeserializationError(
                "<%s> node is missing the required '%s' attribute" \
                    % (node.nodeName, attr))
        try:
            Model = models.get_model(*model_identifier.split("."))
        except TypeError:
            Model = None
        if Model is None:
            raise base.DeserializationError(
                "<%s> node has invalid model identifier: '%s'" % \
                    (node.nodeName, model_identifier))
        return Model
+
+
def getInnerText(node):
    """
    Get all the inner text of a DOM node (recursively).

    Text and CDATA children contribute their data directly; element
    children are descended into; every other node type is ignored.
    """
    # inspired by http://mail.python.org/pipermail/xml-sig/2005-March/011022.html
    chunks = []
    for child in node.childNodes:
        node_type = child.nodeType
        if node_type in (child.TEXT_NODE, child.CDATA_SECTION_NODE):
            chunks.append(child.data)
        elif node_type == child.ELEMENT_NODE:
            chunks.extend(getInnerText(child))
    return u"".join(chunks)
+
diff --git a/webapp/django/core/servers/__init__.py b/webapp/django/core/servers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/webapp/django/core/servers/__init__.py
diff --git a/webapp/django/core/servers/basehttp.py b/webapp/django/core/servers/basehttp.py
new file mode 100644
index 0000000000..f88f8c70bb
--- /dev/null
+++ b/webapp/django/core/servers/basehttp.py
@@ -0,0 +1,665 @@
+"""
+BaseHTTPServer that implements the Python WSGI protocol (PEP 333, rev 1.21).
+
+Adapted from wsgiref.simple_server: http://svn.eby-sarna.com/wsgiref/
+
+This is a simple server for use in testing or debugging Django apps. It hasn't
+been reviewed for security issues. Don't use it for production use.
+"""
+
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+import mimetypes
+import os
+import re
+import sys
+import urllib
+
+from django.utils.http import http_date
+
__version__ = "0.1"
__all__ = ['WSGIServer','WSGIRequestHandler']

# Identification strings: advertised through the SERVER_SOFTWARE environ key
# and the "Server:" response header (see ServerHandler.setup_environ and
# ServerHandler.send_preamble).
server_version = "WSGIServer/" + __version__
sys_version = "Python/" + sys.version.split()[0]
software_version = server_version + ' ' + sys_version
+
class WSGIServerException(Exception):
    """Raised by WSGIServer.server_bind() when the socket cannot be bound."""
    pass
+
+class FileWrapper(object):
+ """Wrapper to convert file-like objects to iterables"""
+
+ def __init__(self, filelike, blksize=8192):
+ self.filelike = filelike
+ self.blksize = blksize
+ if hasattr(filelike,'close'):
+ self.close = filelike.close
+
+ def __getitem__(self,key):
+ data = self.filelike.read(self.blksize)
+ if data:
+ return data
+ raise IndexError
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ data = self.filelike.read(self.blksize)
+ if data:
+ return data
+ raise StopIteration
+
+# Regular expression that matches `special' characters in parameters, the
+# existence of which force quoting of the parameter value.
+tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
+
+def _formatparam(param, value=None, quote=1):
+ """Convenience function to format and return a key=value pair.
+
+ This will quote the value if needed or if quote is true.
+ """
+ if value is not None and len(value) > 0:
+ if quote or tspecials.search(value):
+ value = value.replace('\\', '\\\\').replace('"', r'\"')
+ return '%s="%s"' % (param, value)
+ else:
+ return '%s=%s' % (param, value)
+ else:
+ return param
+
class Headers(object):
    """Manage a collection of HTTP response headers.

    Wraps (and shares -- the list is not copied) a list of (name, value)
    tuples. Header-name lookups are case-insensitive and duplicate names
    are permitted, matching HTTP semantics.
    """
    def __init__(self, headers):
        if not isinstance(headers, list):
            raise TypeError("Headers must be a list of name/value tuples")
        self._headers = headers

    def __len__(self):
        """Return the total number of headers, including duplicates."""
        return len(self._headers)

    def __setitem__(self, name, val):
        """Set the value of a header, dropping any previous occurrences."""
        del self[name]
        self._headers.append((name, val))

    def __delitem__(self, name):
        """Delete all occurrences of a header, if present.

        Does *not* raise an exception if the header is missing.
        """
        name = name.lower()
        # BUGFIX: was the `<>` inequality operator, which is deprecated in
        # Python 2 and removed in Python 3; `!=` is the equivalent.
        self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]

    def __getitem__(self, name):
        """Get the first header value for 'name'.

        Return None if the header is missing instead of raising an
        exception. If the header appears multiple times, the first
        occurrence is returned; use get_all() to obtain every value.
        """
        return self.get(name)

    def has_key(self, name):
        """Return true if the message contains the header."""
        return self.get(name) is not None

    __contains__ = has_key

    def get_all(self, name):
        """Return a list of all the values for the named field.

        These will be sorted in the order they appeared in the original
        header list or were added to this instance, and may contain
        duplicates. Any fields deleted and re-inserted are always appended
        to the header list. If no fields exist with the given name, returns
        an empty list.
        """
        name = name.lower()
        return [kv[1] for kv in self._headers if kv[0].lower() == name]

    def get(self, name, default=None):
        """Get the first header value for 'name', or return 'default'."""
        name = name.lower()
        for k, v in self._headers:
            if k.lower() == name:
                return v
        return default

    def keys(self):
        """Return a list of all the header field names.

        Ordered as they appear in the underlying list; may contain
        duplicates. Deleted-and-re-inserted fields are always appended.
        """
        return [k for k, v in self._headers]

    def values(self):
        """Return a list of all header values.

        Ordered as they appear in the underlying list; may contain
        duplicates. Deleted-and-re-inserted fields are always appended.
        """
        return [v for k, v in self._headers]

    def items(self):
        """Return a copy of the underlying (name, value) tuple list."""
        return self._headers[:]

    def __repr__(self):
        # BUGFIX: was backtick repr syntax (`...`), removed in Python 3;
        # repr() is the equivalent spelling in both Python 2 and 3.
        return "Headers(%s)" % repr(self._headers)

    def __str__(self):
        """str() returns the formatted headers, complete with end line,
        suitable for direct HTTP transmission."""
        return '\r\n'.join(["%s: %s" % kv for kv in self._headers] + ['', ''])

    def setdefault(self, name, value):
        """Return the first matching header value for 'name', or 'value'.

        If there is no header named 'name', add a new header with name
        'name' and value 'value'.
        """
        result = self.get(name)
        if result is None:
            self._headers.append((name, value))
            return value
        else:
            return result

    def add_header(self, _name, _value, **_params):
        """Extended header setting.

        _name is the header field to add. Keyword arguments can be used to
        set additional parameters for the header field, with underscores
        converted to dashes. Normally the parameter will be added as
        key="value" unless value is None, in which case only the key will
        be added.

        Example:

            h.add_header('content-disposition', 'attachment', filename='bud.gif')

        Note that unlike the corresponding 'email.Message' method, this does
        *not* handle '(charset, language, value)' tuples: all values must be
        strings or None.
        """
        parts = []
        if _value is not None:
            parts.append(_value)
        for k, v in _params.items():
            if v is None:
                parts.append(k.replace('_', '-'))
            else:
                # Delegates quoting/escaping to the module-level helper.
                parts.append(_formatparam(k.replace('_', '-'), v))
        self._headers.append((_name, "; ".join(parts)))
+
def guess_scheme(environ):
    """Return a guess for whether 'wsgi.url_scheme' should be 'http' or
    'https', based on the CGI-style HTTPS environment variable."""
    https = environ.get("HTTPS")
    if https in ('yes', 'on', '1'):
        return 'https'
    return 'http'
+
# Connection-level headers that must not be forwarded by proxies
# (the HTTP/1.1 "hop-by-hop" set). Kept as a dict with value 1 so any
# existing importers relying on the mapping type keep working.
_hop_headers = dict.fromkeys((
    'connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization',
    'te', 'trailers', 'transfer-encoding', 'upgrade',
), 1)

def is_hop_by_hop(header_name):
    """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""
    return header_name.lower() in _hop_headers
+
class ServerHandler(object):
    """Manage the invocation of a WSGI application.

    Drives one request/response cycle: builds the WSGI environ, calls the
    application, implements the PEP 333 start_response()/write() callables,
    and streams the result to self.stdout. Adapted from wsgiref.
    """

    # Configuration parameters; can override per-subclass or per-instance
    wsgi_version = (1,0)
    wsgi_multithread = True
    wsgi_multiprocess = True
    wsgi_run_once = False

    origin_server = True # We are transmitting direct to client
    http_version = "1.0" # Version that should be used for response
    server_software = software_version

    # os_environ is used to supply configuration from the OS environment:
    # by default it's a copy of 'os.environ' as of import time, but you can
    # override this in e.g. your __init__ method.
    os_environ = dict(os.environ.items())

    # Collaborator classes
    wsgi_file_wrapper = FileWrapper # set to None to disable
    headers_class = Headers # must be a Headers-like class

    # Error handling (also per-subclass or per-instance)
    traceback_limit = None # Print entire traceback to self.get_stderr()
    error_status = "500 INTERNAL SERVER ERROR"
    error_headers = [('Content-Type','text/plain')]

    # State variables (don't mess with these)
    status = result = None      # response status line / app's return iterable
    headers_sent = False        # True once the preamble+headers went out
    headers = None              # headers_class instance, set by start_response
    bytes_sent = 0              # body bytes written so far (for logging)

    def __init__(self, stdin, stdout, stderr, environ, multithread=True,
        multiprocess=False):
        # stdin/stdout/stderr are file-like request input / response output /
        # error log streams; environ is the per-request CGI variable dict.
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.base_env = environ
        self.wsgi_multithread = multithread
        self.wsgi_multiprocess = multiprocess

    def run(self, application):
        """Invoke the application"""
        # Note to self: don't move the close()! Asynchronous servers shouldn't
        # call close() from finish_response(), so if you close() anywhere but
        # the double-error branch here, you'll break asynchronous servers by
        # prematurely closing. Async servers must return from 'run()' without
        # closing if there might still be output to iterate over.
        try:
            self.setup_environ()
            self.result = application(self.environ, self.start_response)
            self.finish_response()
        except:
            # NOTE(review): bare except also catches SystemExit and
            # KeyboardInterrupt (Python 2 has no saner spelling that keeps
            # this behavior).
            try:
                self.handle_error()
            except:
                # If we get an error handling an error, just give up already!
                self.close()
                raise # ...and let the actual server figure it out.

    def setup_environ(self):
        """Set up the environment for one request"""

        env = self.environ = self.os_environ.copy()
        self.add_cgi_vars()

        env['wsgi.input'] = self.get_stdin()
        env['wsgi.errors'] = self.get_stderr()
        env['wsgi.version'] = self.wsgi_version
        env['wsgi.run_once'] = self.wsgi_run_once
        env['wsgi.url_scheme'] = self.get_scheme()
        env['wsgi.multithread'] = self.wsgi_multithread
        env['wsgi.multiprocess'] = self.wsgi_multiprocess

        if self.wsgi_file_wrapper is not None:
            env['wsgi.file_wrapper'] = self.wsgi_file_wrapper

        if self.origin_server and self.server_software:
            env.setdefault('SERVER_SOFTWARE',self.server_software)

    def finish_response(self):
        """Send any iterable data, then close self and the iterable

        Subclasses intended for use in asynchronous servers will
        want to redefine this method, such that it sets up callbacks
        in the event loop to iterate over the data, and to call
        'self.close()' once the response is finished.
        """
        # NOTE(review): upstream wsgiref uses `or` here
        # ("not result_is_file() or not sendfile()"); with `and`, a
        # file-wrapped result whose sendfile() returns False (the default)
        # is never iterated, so no body is sent -- confirm this is intended.
        if not self.result_is_file() and not self.sendfile():
            for data in self.result:
                self.write(data)
            self.finish_content()
        self.close()

    def get_scheme(self):
        """Return the URL scheme being used"""
        return guess_scheme(self.environ)

    def set_content_length(self):
        """Compute Content-Length or switch to chunked encoding if possible"""
        try:
            blocks = len(self.result)
        except (TypeError, AttributeError, NotImplementedError):
            # Unsized iterable: length cannot be known up front.
            pass
        else:
            if blocks==1:
                # Exactly one block: by now it has been fully counted in
                # bytes_sent, so that is the body length.
                self.headers['Content-Length'] = str(self.bytes_sent)
                return
        # XXX Try for chunked encoding if origin server and client is 1.1

    def cleanup_headers(self):
        """Make any necessary header changes or defaults

        Subclasses can extend this to add other defaults.
        """
        if 'Content-Length' not in self.headers:
            self.set_content_length()

    def start_response(self, status, headers,exc_info=None):
        """'start_response()' callable as specified by PEP 333"""

        if exc_info:
            try:
                if self.headers_sent:
                    # Re-raise original exception if headers sent
                    # (Python 2 three-expression raise keeps the traceback).
                    raise exc_info[0], exc_info[1], exc_info[2]
            finally:
                exc_info = None # avoid dangling circular ref
        elif self.headers is not None:
            raise AssertionError("Headers already set!")

        assert isinstance(status, str),"Status must be a string"
        assert len(status)>=4,"Status must be at least 4 characters"
        assert int(status[:3]),"Status message must begin w/3-digit code"
        assert status[3]==" ", "Status message must have a space after code"
        if __debug__:
            for name,val in headers:
                assert isinstance(name, str),"Header names must be strings"
                assert isinstance(val, str),"Header values must be strings"
                assert not is_hop_by_hop(name),"Hop-by-hop headers not allowed"
        self.status = status
        self.headers = self.headers_class(headers)
        return self.write

    def send_preamble(self):
        """Transmit version/status/date/server, via self._write()"""
        if self.origin_server:
            # HTTP/0.9 clients cannot accept a status line or headers.
            if self.client_is_modern():
                self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
                if 'Date' not in self.headers:
                    self._write(
                        'Date: %s\r\n' % http_date()
                    )
                if self.server_software and 'Server' not in self.headers:
                    self._write('Server: %s\r\n' % self.server_software)
        else:
            # Gateway (CGI-style) mode: the real server adds the status line.
            self._write('Status: %s\r\n' % self.status)

    def write(self, data):
        """'write()' callable as specified by PEP 333"""

        assert isinstance(data, str), "write() argument must be string"

        if not self.status:
            raise AssertionError("write() before start_response()")

        elif not self.headers_sent:
            # Before the first output, send the stored headers
            self.bytes_sent = len(data) # make sure we know content-length
            self.send_headers()
        else:
            self.bytes_sent += len(data)

        # XXX check Content-Length and truncate if too many bytes written?

        # If data is too large, socket will choke, so write chunks no larger
        # than 32MB at a time.
        length = len(data)
        if length > 33554432:
            offset = 0
            while offset < length:
                # chunk_size is loop-invariant (32MB); the final short chunk
                # is handled by Python's clamping slice semantics.
                chunk_size = min(33554432, length)
                self._write(data[offset:offset+chunk_size])
                self._flush()
                offset += chunk_size
        else:
            self._write(data)
            self._flush()

    def sendfile(self):
        """Platform-specific file transmission

        Override this method in subclasses to support platform-specific
        file transmission. It is only called if the application's
        return iterable ('self.result') is an instance of
        'self.wsgi_file_wrapper'.

        This method should return a true value if it was able to actually
        transmit the wrapped file-like object using a platform-specific
        approach. It should return a false value if normal iteration
        should be used instead. An exception can be raised to indicate
        that transmission was attempted, but failed.

        NOTE: this method should call 'self.send_headers()' if
        'self.headers_sent' is false and it is going to attempt direct
        transmission of the file.
        """
        return False # No platform-specific transmission by default

    def finish_content(self):
        """Ensure headers and content have both been sent"""
        if not self.headers_sent:
            # Nothing was written: emit an explicit empty body.
            self.headers['Content-Length'] = "0"
            self.send_headers()
        else:
            pass # XXX check if content-length was too short?

    def close(self):
        # Log the request first, then close the app's iterable (if it asks),
        # then reset all per-request state -- each step runs even if the
        # previous one raises.
        try:
            self.request_handler.log_request(self.status.split(' ',1)[0], self.bytes_sent)
        finally:
            try:
                if hasattr(self.result,'close'):
                    self.result.close()
            finally:
                self.result = self.headers = self.status = self.environ = None
                self.bytes_sent = 0; self.headers_sent = False

    def send_headers(self):
        """Transmit headers to the client, via self._write()"""
        self.cleanup_headers()
        self.headers_sent = True
        if not self.origin_server or self.client_is_modern():
            self.send_preamble()
            self._write(str(self.headers))

    def result_is_file(self):
        """True if 'self.result' is an instance of 'self.wsgi_file_wrapper'"""
        wrapper = self.wsgi_file_wrapper
        return wrapper is not None and isinstance(self.result,wrapper)

    def client_is_modern(self):
        """True if client can accept status and headers"""
        return self.environ['SERVER_PROTOCOL'].upper() != 'HTTP/0.9'

    def log_exception(self,exc_info):
        """Log the 'exc_info' tuple in the server log

        Subclasses may override to retarget the output or change its format.
        """
        try:
            from traceback import print_exception
            stderr = self.get_stderr()
            print_exception(
                exc_info[0], exc_info[1], exc_info[2],
                self.traceback_limit, stderr
            )
            stderr.flush()
        finally:
            exc_info = None # break traceback reference cycle

    def handle_error(self):
        """Log current error, and send error output to client if possible"""
        self.log_exception(sys.exc_info())
        if not self.headers_sent:
            self.result = self.error_output(self.environ, self.start_response)
            self.finish_response()
        # XXX else: attempt advanced recovery techniques for HTML or text?

    def error_output(self, environ, start_response):
        # Minimal WSGI app returning the traceback as text/plain; passing
        # exc_info lets start_response() accept the call post-headers.
        import traceback
        start_response(self.error_status, self.error_headers[:], sys.exc_info())
        return ['\n'.join(traceback.format_exception(*sys.exc_info()))]

    # Pure abstract methods; *must* be overridden in subclasses

    def _write(self,data):
        self.stdout.write(data)
        # Cache the bound method so subsequent calls skip this wrapper.
        self._write = self.stdout.write

    def _flush(self):
        self.stdout.flush()
        # Cache the bound method so subsequent calls skip this wrapper.
        self._flush = self.stdout.flush

    def get_stdin(self):
        return self.stdin

    def get_stderr(self):
        return self.stderr

    def add_cgi_vars(self):
        # Merge the per-request CGI variables into the environ.
        self.environ.update(self.base_env)
+
class WSGIServer(HTTPServer):
    """BaseHTTPServer that implements the Python WSGI protocol.

    Holds the WSGI application object and the base CGI environ shared by
    every request handled by this server.
    """
    application = None

    def server_bind(self):
        """Override server_bind to store the server name."""
        try:
            HTTPServer.server_bind(self)
        except Exception, e:
            # Python 2 except/raise syntax; wrap socket errors in the
            # project-specific exception so callers can report them nicely.
            raise WSGIServerException, e
        self.setup_environ()

    def setup_environ(self):
        # Set up base environment shared by all requests; per-request keys
        # are layered on top in WSGIRequestHandler.get_environ().
        env = self.base_environ = {}
        env['SERVER_NAME'] = self.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PORT'] = str(self.server_port)
        env['REMOTE_HOST']=''
        env['CONTENT_LENGTH']=''
        env['SCRIPT_NAME'] = ''

    def get_app(self):
        # Return the WSGI application this server dispatches to.
        return self.application

    def set_app(self,application):
        # Install the WSGI application this server dispatches to.
        self.application = application
+
class WSGIRequestHandler(BaseHTTPRequestHandler):
    """Request handler that translates an HTTP request into a WSGI environ
    and hands it to a ServerHandler for dispatch."""

    server_version = "WSGIServer/" + __version__

    def __init__(self, *args, **kwargs):
        from django.conf import settings
        self.admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
        # We set self.path to avoid crashes in log_message() on unsupported
        # requests (like "OPTIONS").
        self.path = ''
        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

    def get_environ(self):
        """Build the per-request WSGI/CGI environ dict on top of the server's
        base_environ."""
        env = self.server.base_environ.copy()
        env['SERVER_PROTOCOL'] = self.request_version
        env['REQUEST_METHOD'] = self.command
        if '?' in self.path:
            path,query = self.path.split('?',1)
        else:
            path,query = self.path,''

        env['PATH_INFO'] = urllib.unquote(path)
        env['QUERY_STRING'] = query
        env['REMOTE_ADDR'] = self.client_address[0]

        # self.headers is a Python 2 mimetools.Message; typeheader is the
        # raw Content-Type line, type its parsed fallback.
        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader

        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length

        for h in self.headers.headers:
            k,v = h.split(':',1)
            k=k.replace('-','_').upper(); v=v.strip()
            if k in env:
                continue # skip CONTENT_LENGTH/CONTENT_TYPE etc. set above
            if 'HTTP_'+k in env:
                env['HTTP_'+k] += ','+v # comma-separate multiple headers
            else:
                env['HTTP_'+k] = v
        return env

    def get_stderr(self):
        # WSGI error stream for this request.
        return sys.stderr

    def handle(self):
        """Handle a single HTTP request"""
        self.raw_requestline = self.rfile.readline()
        if not self.parse_request(): # An error code has been sent, just exit
            return
        handler = ServerHandler(self.rfile, self.wfile, self.get_stderr(), self.get_environ())
        handler.request_handler = self # backpointer for logging
        handler.run(self.server.get_app())

    def log_message(self, format, *args):
        # Don't bother logging requests for admin images or the favicon.
        if self.path.startswith(self.admin_media_prefix) or self.path == '/favicon.ico':
            return
        sys.stderr.write("[%s] %s\n" % (self.log_date_time_string(), format % args))
+
class AdminMediaHandler(object):
    """
    WSGI middleware that intercepts calls to the admin media directory, as
    defined by the ADMIN_MEDIA_PREFIX setting, and serves those images.
    Use this ONLY LOCALLY, for development! This hasn't been tested for
    security and is not super efficient.
    """
    def __init__(self, application, media_dir=None):
        # application: the wrapped WSGI app; media_dir: filesystem directory
        # to serve (defaults to Django's bundled contrib/admin/media).
        from django.conf import settings
        self.application = application
        if not media_dir:
            import django
            self.media_dir = django.__path__[0] + '/contrib/admin/media'
        else:
            self.media_dir = media_dir
        self.media_url = settings.ADMIN_MEDIA_PREFIX

    def __call__(self, environ, start_response):
        import os.path

        # Ignore requests that aren't under ADMIN_MEDIA_PREFIX. Also ignore
        # all requests if ADMIN_MEDIA_PREFIX isn't a relative URL.
        if self.media_url.startswith('http://') or self.media_url.startswith('https://') \
            or not environ['PATH_INFO'].startswith(self.media_url):
            return self.application(environ, start_response)

        # Find the admin file and serve it up, if it exists and is readable.
        # NOTE(review): relative_url comes straight from PATH_INFO and is
        # joined without normalization, so a '..' segment could escape
        # media_dir (path traversal). Dev-only per the docstring, but confirm.
        relative_url = environ['PATH_INFO'][len(self.media_url):]
        file_path = os.path.join(self.media_dir, relative_url)
        if not os.path.exists(file_path):
            status = '404 NOT FOUND'
            headers = {'Content-type': 'text/plain'}
            output = ['Page not found: %s' % file_path]
        else:
            try:
                fp = open(file_path, 'rb')
            except IOError:
                # NOTE(review): 403 FORBIDDEN would be the conventional code
                # for a permission error; left as-is to preserve behavior.
                status = '401 UNAUTHORIZED'
                headers = {'Content-type': 'text/plain'}
                output = ['Permission denied: %s' % file_path]
            else:
                status = '200 OK'
                headers = {}
                mime_type = mimetypes.guess_type(file_path)[0]
                if mime_type:
                    headers['Content-Type'] = mime_type
                # Whole file is read into memory -- fine for small admin media.
                output = [fp.read()]
                fp.close()
        start_response(status, headers.items())
        return output
+
def run(addr, port, wsgi_handler):
    """Bind a WSGIServer to (addr, port), install wsgi_handler as its
    application, and serve requests forever (blocks; never returns
    normally)."""
    server_address = (addr, port)
    httpd = WSGIServer(server_address, WSGIRequestHandler)
    httpd.set_app(wsgi_handler)
    httpd.serve_forever()
diff --git a/webapp/django/core/servers/fastcgi.py b/webapp/django/core/servers/fastcgi.py
new file mode 100644
index 0000000000..dc4c35b08d
--- /dev/null
+++ b/webapp/django/core/servers/fastcgi.py
@@ -0,0 +1,179 @@
+"""
+FastCGI (or SCGI, or AJP1.3 ...) server that implements the WSGI protocol.
+
+Uses the flup python package: http://www.saddi.com/software/flup/
+
+This is a adaptation of the flup package to add FastCGI server support
+to run Django apps from Web servers that support the FastCGI protocol.
+This module can be run standalone or from the django-admin / manage.py
+scripts using the "runfcgi" directive.
+
+Run with the extra option "help" for a list of additional options you can
+pass to this server.
+"""
+
+import sys, os
+
+__version__ = "0.1"
+__all__ = ["runfastcgi"]
+
# Usage text printed by fastcgi_help() for the "runfcgi help" directive.
FASTCGI_HELP = r"""
  Run this project as a fastcgi (or some other protocol supported
  by flup) application. To do this, the flup package from
  http://www.saddi.com/software/flup/ is required.

   runfcgi [options] [fcgi settings]

Optional Fcgi settings: (setting=value)
  protocol=PROTOCOL    fcgi, scgi, ajp, ... (default fcgi)
  host=HOSTNAME        hostname to listen on..
  port=PORTNUM         port to listen on.
  socket=FILE          UNIX socket to listen on.
  method=IMPL          prefork or threaded (default prefork)
  maxrequests=NUMBER   number of requests a child handles before it is
                       killed and a new child is forked (0 = no limit).
  maxspare=NUMBER      max number of spare processes / threads
  minspare=NUMBER      min number of spare processes / threads.
  maxchildren=NUMBER   hard limit number of processes / threads
  daemonize=BOOL       whether to detach from terminal.
  pidfile=FILE         write the spawned process-id to this file.
  workdir=DIRECTORY    change to this directory when daemonizing.
  outlog=FILE          write stdout to this file.
  errlog=FILE          write stderr to this file.
  umask=UMASK          umask to use when daemonizing (default 022).

Examples:
  Run a "standard" fastcgi process on a file-descriptor
  (for webservers which spawn your processes for you)
    $ manage.py runfcgi method=threaded

  Run a scgi server on a TCP host/port
    $ manage.py runfcgi protocol=scgi method=prefork host=127.0.0.1 port=8025

  Run a fastcgi server on a UNIX domain socket (posix platforms only)
    $ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock

  Run a fastCGI as a daemon and write the spawned PID in a file
    $ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \
        daemonize=true pidfile=/var/run/django-fcgi.pid

"""

# Defaults for every option runfastcgi() recognizes; user-supplied
# kwargs/argset entries override these (keys are lower-cased).
FASTCGI_OPTIONS = {
    'protocol': 'fcgi',
    'host': None,
    'port': None,
    'socket': None,
    'method': 'fork',
    'daemonize': None,
    'workdir': '/',
    'pidfile': None,
    'maxspare': 5,
    'minspare': 2,
    'maxchildren': 50,
    'maxrequests': 0,
    'outlog': None,
    'errlog': None,
    'umask': None,
}
+
def fastcgi_help(message=None):
    """Print the usage text (plus an optional extra message) and return
    False, so callers can write `return fastcgi_help(...)` on error."""
    print FASTCGI_HELP
    if message:
        print message
    return False
+
def runfastcgi(argset=[], **kwargs):
    """Parse "key=value" style arguments (argset) plus keyword overrides on
    top of FASTCGI_OPTIONS, then configure and run a flup WSGIServer around
    Django's WSGIHandler. Returns False on any usage/configuration error.

    NOTE(review): the mutable default `argset=[]` is only iterated, never
    mutated, so it is harmless here -- but it is a classic footgun.
    """
    options = FASTCGI_OPTIONS.copy()
    options.update(kwargs)
    for x in argset:
        if "=" in x:
            k, v = x.split('=', 1)
        else:
            # Bare flags (e.g. "help") become boolean True.
            k, v = x, True
        options[k.lower()] = v

    if "help" in options:
        return fastcgi_help()

    try:
        import flup
    except ImportError, e:
        print >> sys.stderr, "ERROR: %s" % e
        print >> sys.stderr, "  Unable to load the flup package.  In order to run django"
        print >> sys.stderr, "  as a FastCGI application, you will need to get flup from"
        print >> sys.stderr, "  http://www.saddi.com/software/flup/   If you've already"
        print >> sys.stderr, "  installed flup, then make sure you have it in your PYTHONPATH."
        return False

    flup_module = 'server.' + options['protocol']

    if options['method'] in ('prefork', 'fork'):
        wsgi_opts = {
            'maxSpare': int(options["maxspare"]),
            'minSpare': int(options["minspare"]),
            'maxChildren': int(options["maxchildren"]),
            'maxRequests': int(options["maxrequests"]),
        }
        flup_module += '_fork'
    elif options['method'] in ('thread', 'threaded'):
        wsgi_opts = {
            'maxSpare': int(options["maxspare"]),
            'minSpare': int(options["minspare"]),
            'maxThreads': int(options["maxchildren"]),
        }
    else:
        return fastcgi_help("ERROR: Implementation must be one of prefork or thread.")

    wsgi_opts['debug'] = False # Turn off flup tracebacks

    try:
        # fromlist trick: makes __import__ return the leaf module
        # (e.g. flup.server.fcgi_fork) instead of the top-level package.
        WSGIServer = getattr(__import__('flup.' + flup_module, '', '', flup_module), 'WSGIServer')
    except:
        # NOTE(review): bare except hides the real failure (syntax error,
        # missing attribute, ...) behind a generic message -- confirm intended.
        print "Can't import flup." + flup_module
        return False

    # Prep up and go
    from django.core.handlers.wsgi import WSGIHandler

    # Exactly one of TCP (host+port) or UNIX socket may be configured;
    # neither means a pre-opened file descriptor (web server spawned us).
    if options["host"] and options["port"] and not options["socket"]:
        wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
    elif options["socket"] and not options["host"] and not options["port"]:
        wsgi_opts['bindAddress'] = options["socket"]
    elif not options["socket"] and not options["host"] and not options["port"]:
        wsgi_opts['bindAddress'] = None
    else:
        return fastcgi_help("Invalid combination of host, port, socket.")

    if options["daemonize"] is None:
        # Default to daemonizing if we're running on a socket/named pipe.
        daemonize = (wsgi_opts['bindAddress'] is not None)
    else:
        if options["daemonize"].lower() in ('true', 'yes', 't'):
            daemonize = True
        elif options["daemonize"].lower() in ('false', 'no', 'f'):
            daemonize = False
        else:
            return fastcgi_help("ERROR: Invalid option for daemonize parameter.")

    daemon_kwargs = {}
    if options['outlog']:
        daemon_kwargs['out_log'] = options['outlog']
    if options['errlog']:
        daemon_kwargs['err_log'] = options['errlog']
    if options['umask']:
        daemon_kwargs['umask'] = int(options['umask'])

    if daemonize:
        from django.utils.daemonize import become_daemon
        become_daemon(our_home_dir=options["workdir"], **daemon_kwargs)

    # Write the pid file after daemonizing so it records the daemon's pid.
    if options["pidfile"]:
        fp = open(options["pidfile"], "w")
        fp.write("%d\n" % os.getpid())
        fp.close()

    WSGIServer(WSGIHandler(), **wsgi_opts).run()

if __name__ == '__main__':
    runfastcgi(sys.argv[1:])
diff --git a/webapp/django/core/signals.py b/webapp/django/core/signals.py
new file mode 100644
index 0000000000..a14af009ed
--- /dev/null
+++ b/webapp/django/core/signals.py
@@ -0,0 +1,5 @@
+from django.dispatch import Signal
+
# Core request-lifecycle signals. The names indicate when Django sends them:
# at the start and end of HTTP request processing, and when an exception
# escapes request handling (receivers of the latter get the request object,
# per providing_args).
request_started = Signal()
request_finished = Signal()
got_request_exception = Signal(providing_args=["request"])
diff --git a/webapp/django/core/template_loader.py b/webapp/django/core/template_loader.py
new file mode 100644
index 0000000000..ee86178cc1
--- /dev/null
+++ b/webapp/django/core/template_loader.py
@@ -0,0 +1,7 @@
+# This module is DEPRECATED!
+#
+# You should no longer be using django.template_loader.
+#
+# Use django.template.loader instead.
+
+from django.template.loader import *
diff --git a/webapp/django/core/urlresolvers.py b/webapp/django/core/urlresolvers.py
new file mode 100644
index 0000000000..f6e09c1f3e
--- /dev/null
+++ b/webapp/django/core/urlresolvers.py
@@ -0,0 +1,330 @@
+"""
+This module converts requested URLs to callback view functions.
+
+RegexURLResolver is the main class here. Its resolve() method takes a URL (as
+a string) and returns a tuple in this format:
+
+ (view_function, function_args, function_kwargs)
+"""
+
+import re
+
+from django.http import Http404
+from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
+from django.utils.encoding import iri_to_uri, force_unicode, smart_str
+from django.utils.functional import memoize
+from django.utils.thread_support import currentThread
+
+try:
+ reversed
+except NameError:
+ from django.utils.itercompat import reversed # Python 2.3 fallback
+
+_resolver_cache = {} # Maps urlconf modules to RegexURLResolver instances.
+_callable_cache = {} # Maps view and url pattern names to their view functions.
+
+# SCRIPT_NAME prefixes for each thread are stored here. If there's no entry for
+# the current thread (which is the only one we ever access), it is assumed to
+# be empty.
+_prefixes = {}
+
+class Resolver404(Http404):
+    # Http404 subclass raised by RegexURLResolver.resolve() when no pattern
+    # matches; carries a dict of the patterns 'tried' and the 'path'.
+    pass
+
+class NoReverseMatch(Exception):
+    """Raised when reverse() cannot find a matching URL for a view/args."""
+    # Don't make this raise an error when used in a template.
+    silent_variable_failure = True
+
+def get_callable(lookup_view, can_fail=False):
+    """
+    Convert a string version of a function name to the callable object.
+
+    If the lookup_view is not an import path, it is assumed to be a URL pattern
+    label and the original string is returned.
+
+    If can_fail is True, lookup_view might be a URL pattern label, so errors
+    during the import fail and the string is returned.
+    """
+    if not callable(lookup_view):
+        try:
+            # Bail early for non-ASCII strings (they can't be functions).
+            lookup_view = lookup_view.encode('ascii')
+            mod_name, func_name = get_mod_func(lookup_view)
+            if func_name != '':
+                lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
+        except (ImportError, AttributeError):
+            # Re-raise only when the caller expects a resolvable import path;
+            # otherwise fall through and return the original string label.
+            if not can_fail:
+                raise
+        except UnicodeEncodeError:
+            # Non-ASCII strings can't be dotted import paths; treat as label.
+            pass
+    return lookup_view
+# Memoized on the first argument via the module-level _callable_cache so
+# repeated lookups of the same view path are cheap.
+get_callable = memoize(get_callable, _callable_cache, 1)
+
+def get_resolver(urlconf):
+    # Return a RegexURLResolver for `urlconf`, falling back to the project's
+    # ROOT_URLCONF setting when None. Memoized below via _resolver_cache so
+    # each urlconf module is wrapped in a resolver only once.
+    if urlconf is None:
+        from django.conf import settings
+        urlconf = settings.ROOT_URLCONF
+    return RegexURLResolver(r'^/', urlconf)
+get_resolver = memoize(get_resolver, _resolver_cache, 1)
+
+def get_mod_func(callback):
+ # Converts 'django.views.news.stories.story_detail' to
+ # ['django.views.news.stories', 'story_detail']
+ try:
+ dot = callback.rindex('.')
+ except ValueError:
+ return callback, ''
+ return callback[:dot], callback[dot+1:]
+
+def reverse_helper(regex, *args, **kwargs):
+    """
+    Does a "reverse" lookup -- returns the URL for the given args/kwargs.
+    The args/kwargs are applied to the given compiled regular expression.
+    For example:
+
+        >>> reverse_helper(re.compile('^places/(\d+)/$'), 3)
+        'places/3/'
+        >>> reverse_helper(re.compile('^places/(?P<id>\d+)/$'), id=3)
+        'places/3/'
+        >>> reverse_helper(re.compile('^people/(?P<state>\w\w)/(\w+)/$'), 'adrian', state='il')
+        'people/il/adrian/'
+
+    Raises NoReverseMatch if the args/kwargs aren't valid for the regex.
+    """
+    # Each parenthesised group in the pattern is substituted with the matching
+    # arg/kwarg by MatchChecker (which validates the value against the group's
+    # sub-regex); the remaining regex markers are then stripped to leave a
+    # plain URL fragment.
+    # TODO: Handle nested parenthesis in the following regex.
+    result = re.sub(r'\(([^)]+)\)', MatchChecker(args, kwargs), regex.pattern)
+    return result.replace('^', '').replace('$', '').replace('\\', '')
+
+class MatchChecker(object):
+    "Class used in reverse RegexURLPattern lookup."
+    # Callable passed to re.sub() by reverse_helper(): each parenthesised
+    # group in the URL pattern is replaced by the corresponding positional
+    # or keyword argument, after validating the value against the group's
+    # own sub-regex.
+    def __init__(self, args, kwargs):
+        self.args, self.kwargs = args, kwargs
+        # Index of the next positional argument to consume.
+        self.current_arg = 0
+
+    def __call__(self, match_obj):
+        # match_obj.group(1) is the contents of the parenthesis.
+        # First we need to figure out whether it's a named or unnamed group.
+        #
+        grouped = match_obj.group(1)
+        m = re.search(r'^\?P<(\w+)>(.*?)$', grouped, re.UNICODE)
+        if m: # If this was a named group...
+            # m.group(1) is the name of the group
+            # m.group(2) is the regex.
+            try:
+                value = self.kwargs[m.group(1)]
+            except KeyError:
+                # It was a named group, but the arg was passed in as a
+                # positional arg or not at all.
+                try:
+                    value = self.args[self.current_arg]
+                    self.current_arg += 1
+                except IndexError:
+                    # The arg wasn't passed in.
+                    raise NoReverseMatch('Not enough positional arguments passed in')
+            test_regex = m.group(2)
+        else: # Otherwise, this was a positional (unnamed) group.
+            try:
+                value = self.args[self.current_arg]
+                self.current_arg += 1
+            except IndexError:
+                # The arg wasn't passed in.
+                raise NoReverseMatch('Not enough positional arguments passed in')
+            test_regex = grouped
+        # Note we're using re.match here on purpose because the start of
+        # the string needs to match.
+        if not re.match(test_regex + '$', force_unicode(value), re.UNICODE):
+            raise NoReverseMatch("Value %r didn't match regular expression %r" % (value, test_regex))
+        return force_unicode(value)
+
+class RegexURLPattern(object):
+    """
+    A single URL pattern: a regex plus the view it maps to.
+
+    resolve() matches a path against the regex and returns the
+    (callback, args, kwargs) triple; reverse() rebuilds a URL from a view
+    and its arguments. The callback may be given as a dotted string and is
+    imported lazily on first access via the `callback` property.
+    """
+    def __init__(self, regex, callback, default_args=None, name=None):
+        # regex is a string representing a regular expression.
+        # callback is either a string like 'foo.views.news.stories.story_detail'
+        # which represents the path to a module and a view function name, or a
+        # callable object (view).
+        self.regex = re.compile(regex, re.UNICODE)
+        if callable(callback):
+            self._callback = callback
+        else:
+            # Defer the import until the callback is actually needed.
+            self._callback = None
+            self._callback_str = callback
+        self.default_args = default_args or {}
+        self.name = name
+
+    def __repr__(self):
+        return '<%s %s %s>' % (self.__class__.__name__, self.name, self.regex.pattern)
+
+    def add_prefix(self, prefix):
+        """
+        Adds the prefix string to a string-based callback.
+        """
+        # Callable callbacks have no _callback_str and are left untouched.
+        if not prefix or not hasattr(self, '_callback_str'):
+            return
+        self._callback_str = prefix + '.' + self._callback_str
+
+    def resolve(self, path):
+        # Returns (callback, args, kwargs) on a match, or None (implicitly)
+        # when the regex does not match `path`.
+        match = self.regex.search(path)
+        if match:
+            # If there are any named groups, use those as kwargs, ignoring
+            # non-named groups. Otherwise, pass all non-named arguments as
+            # positional arguments.
+            kwargs = match.groupdict()
+            if kwargs:
+                args = ()
+            else:
+                args = match.groups()
+            # In both cases, pass any extra_kwargs as **kwargs.
+            kwargs.update(self.default_args)
+
+            return self.callback, args, kwargs
+
+    def _get_callback(self):
+        # Lazily import a string-based callback, caching the result so the
+        # import happens at most once per pattern.
+        if self._callback is not None:
+            return self._callback
+        try:
+            self._callback = get_callable(self._callback_str)
+        except ImportError, e:
+            mod_name, _ = get_mod_func(self._callback_str)
+            raise ViewDoesNotExist, "Could not import %s. Error was: %s" % (mod_name, str(e))
+        except AttributeError, e:
+            mod_name, func_name = get_mod_func(self._callback_str)
+            raise ViewDoesNotExist, "Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e))
+        return self._callback
+    callback = property(_get_callback)
+
+    def reverse(self, viewname, *args, **kwargs):
+        # Import the named view and make sure it is the one this pattern
+        # points at before attempting the reverse match.
+        mod_name, func_name = get_mod_func(viewname)
+        try:
+            lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
+        except (ImportError, AttributeError):
+            raise NoReverseMatch
+        if lookup_view != self.callback:
+            raise NoReverseMatch
+        return self.reverse_helper(*args, **kwargs)
+
+    def reverse_helper(self, *args, **kwargs):
+        # Delegate to the module-level reverse_helper with this pattern's regex.
+        return reverse_helper(self.regex, *args, **kwargs)
+
+class RegexURLResolver(object):
+    """
+    Resolves a URL path against an entire urlconf module.
+
+    The resolver strips its own regex prefix from the path, then delegates
+    to each entry in the module's `urlpatterns` (which may themselves be
+    nested RegexURLResolvers). The urlconf module and the reverse-lookup
+    dictionary are both built lazily and cached.
+    """
+    def __init__(self, regex, urlconf_name, default_kwargs=None):
+        # regex is a string representing a regular expression.
+        # urlconf_name is a string representing the module containing urlconfs.
+        self.regex = re.compile(regex, re.UNICODE)
+        self.urlconf_name = urlconf_name
+        self.callback = None
+        self.default_kwargs = default_kwargs or {}
+        self._reverse_dict = {}
+
+    def __repr__(self):
+        return '<%s %s %s>' % (self.__class__.__name__, self.urlconf_name, self.regex.pattern)
+
+    def _get_reverse_dict(self):
+        # Build (once) a mapping from view callables and pattern names to the
+        # chain of patterns needed to reconstruct their URL. Iterating the
+        # patterns in reverse means earlier entries overwrite later ones, so
+        # the first matching pattern wins on lookup.
+        # NOTE(review): unnamed patterns store an entry under the key None,
+        # each overwriting the last -- appears harmless but verify.
+        if not self._reverse_dict and hasattr(self.urlconf_module, 'urlpatterns'):
+            for pattern in reversed(self.urlconf_module.urlpatterns):
+                if isinstance(pattern, RegexURLResolver):
+                    for key, value in pattern.reverse_dict.iteritems():
+                        self._reverse_dict[key] = (pattern,) + value
+                else:
+                    self._reverse_dict[pattern.callback] = (pattern,)
+                    self._reverse_dict[pattern.name] = (pattern,)
+        return self._reverse_dict
+    reverse_dict = property(_get_reverse_dict)
+
+    def resolve(self, path):
+        # Returns (callback, args, kwargs) for the first pattern matching
+        # `path`, or raises Resolver404 listing every pattern tried. Returns
+        # None implicitly when this resolver's own prefix does not match.
+        tried = []
+        match = self.regex.search(path)
+        if match:
+            new_path = path[match.end():]
+            for pattern in self.urlconf_module.urlpatterns:
+                try:
+                    sub_match = pattern.resolve(new_path)
+                except Resolver404, e:
+                    # Accumulate the nested resolver's attempts, prefixed
+                    # with this pattern's regex, for the final 404 report.
+                    tried.extend([(pattern.regex.pattern + ' ' + t) for t in e.args[0]['tried']])
+                else:
+                    if sub_match:
+                        # Merge kwargs captured by this resolver's own regex
+                        # with the sub-pattern's kwargs; keys are forced to
+                        # str so they are usable as **kwargs.
+                        sub_match_dict = dict([(smart_str(k), v) for k, v in match.groupdict().items()])
+                        sub_match_dict.update(self.default_kwargs)
+                        for k, v in sub_match[2].iteritems():
+                            sub_match_dict[smart_str(k)] = v
+                        return sub_match[0], sub_match[1], sub_match_dict
+                    tried.append(pattern.regex.pattern)
+            raise Resolver404, {'tried': tried, 'path': new_path}
+
+    def _get_urlconf_module(self):
+        # Import the urlconf module on first access and cache it.
+        try:
+            return self._urlconf_module
+        except AttributeError:
+            try:
+                self._urlconf_module = __import__(self.urlconf_name, {}, {}, [''])
+            except Exception, e:
+                # Either an invalid urlconf_name, such as "foo.bar.", or some
+                # kind of problem during the actual import.
+                raise ImproperlyConfigured, "Error while importing URLconf %r: %s" % (self.urlconf_name, e)
+        return self._urlconf_module
+    urlconf_module = property(_get_urlconf_module)
+
+    def _get_url_patterns(self):
+        return self.urlconf_module.urlpatterns
+    url_patterns = property(_get_url_patterns)
+
+    def _resolve_special(self, view_type):
+        # Look up the 'handler404'/'handler500' dotted path declared in the
+        # urlconf module and import the view it names.
+        callback = getattr(self.urlconf_module, 'handler%s' % view_type)
+        mod_name, func_name = get_mod_func(callback)
+        try:
+            return getattr(__import__(mod_name, {}, {}, ['']), func_name), {}
+        except (ImportError, AttributeError), e:
+            raise ViewDoesNotExist, "Tried %s. Error was: %s" % (callback, str(e))
+
+    def resolve404(self):
+        # The view used when no pattern matches (handler404).
+        return self._resolve_special('404')
+
+    def resolve500(self):
+        # The view used on server errors (handler500).
+        return self._resolve_special('500')
+
+    def reverse(self, lookup_view, *args, **kwargs):
+        # Map a view (callable, dotted path or pattern name) back to its URL
+        # fragment, joining the fragments of every pattern in the chain.
+        try:
+            lookup_view = get_callable(lookup_view, True)
+        except (ImportError, AttributeError):
+            raise NoReverseMatch("'%s' is not a callable." % lookup_view)
+        if lookup_view in self.reverse_dict:
+            return u''.join([reverse_helper(part.regex, *args, **kwargs) for part in self.reverse_dict[lookup_view]])
+        raise NoReverseMatch("Reverse for '%s' not found." % lookup_view)
+
+    def reverse_helper(self, lookup_view, *args, **kwargs):
+        # Prepend this resolver's own regex fragment to the reversed URL.
+        sub_match = self.reverse(lookup_view, *args, **kwargs)
+        result = reverse_helper(self.regex, *args, **kwargs)
+        return result + sub_match
+
+def resolve(path, urlconf=None):
+ return get_resolver(urlconf).resolve(path)
+
+def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None):
+ args = args or []
+ kwargs = kwargs or {}
+ if prefix is None:
+ prefix = get_script_prefix()
+ return iri_to_uri(u'%s%s' % (prefix, get_resolver(urlconf).reverse(viewname,
+ *args, **kwargs)))
+
+def clear_url_caches():
+ global _resolver_cache
+ global _callable_cache
+ _resolver_cache.clear()
+ _callable_cache.clear()
+
+def set_script_prefix(prefix):
+ """
+ Sets the script prefix for the current thread.
+ """
+ if not prefix.endswith('/'):
+ prefix += '/'
+ _prefixes[currentThread()] = prefix
+
+def get_script_prefix():
+ """
+ Returns the currently active script prefix. Useful for client code that
+ wishes to construct their own URLs manually (although accessing the request
+ instance is normally going to be a lot cleaner).
+ """
+ return _prefixes.get(currentThread(), u'/')
+
diff --git a/webapp/django/core/validators.py b/webapp/django/core/validators.py
new file mode 100644
index 0000000000..f94db40c1b
--- /dev/null
+++ b/webapp/django/core/validators.py
@@ -0,0 +1,598 @@
+"""
+A library of validators that return None and raise ValidationError when the
+provided data isn't valid.
+
+Validators may be callable classes, and they may have an 'always_test'
+attribute. If an 'always_test' attribute exists (regardless of value), the
+validator will *always* be run, regardless of whether its associated
+form field is required.
+"""
+
+import urllib2
+import re
+try:
+ from decimal import Decimal, DecimalException
+except ImportError:
+ from django.utils._decimal import Decimal, DecimalException # Python 2.3
+
+from django.conf import settings
+from django.utils.translation import ugettext as _, ugettext_lazy, ungettext
+from django.utils.functional import Promise, lazy
+from django.utils.encoding import force_unicode, smart_str
+
+_datere = r'\d{4}-\d{1,2}-\d{1,2}'
+_timere = r'(?:[01]?[0-9]|2[0-3]):[0-5][0-9](?::[0-5][0-9])?'
+alnum_re = re.compile(r'^\w+$')
+alnumurl_re = re.compile(r'^[-\w/]+$')
+ansi_date_re = re.compile('^%s$' % _datere)
+ansi_time_re = re.compile('^%s$' % _timere)
+ansi_datetime_re = re.compile('^%s %s$' % (_datere, _timere))
+email_re = re.compile(
+ r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
+ r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"' # quoted-string
+ r')@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}$', re.IGNORECASE) # domain
+integer_re = re.compile(r'^-?\d+$')
+ip4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
+phone_re = re.compile(r'^[A-PR-Y0-9]{3}-[A-PR-Y0-9]{3}-[A-PR-Y0-9]{4}$', re.IGNORECASE)
+slug_re = re.compile(r'^[-\w]+$')
+url_re = re.compile(r'^https?://\S+$')
+
+lazy_inter = lazy(lambda a,b: force_unicode(a) % b, unicode)
+
+class ValidationError(Exception):
+    """
+    Raised when field data fails validation.
+
+    The error always exposes its text as a list of unicode strings in
+    `self.messages`, whether constructed from a single message or a list.
+    """
+    def __init__(self, message):
+        "ValidationError can be passed a string or a list."
+        if isinstance(message, list):
+            self.messages = [force_unicode(msg) for msg in message]
+        else:
+            assert isinstance(message, (basestring, Promise)), ("%s should be a string" % repr(message))
+            self.messages = [force_unicode(message)]
+
+    def __str__(self):
+        # This is needed because, without a __str__(), printing an exception
+        # instance would result in this:
+        # AttributeError: ValidationError instance has no attribute 'args'
+        # See http://www.python.org/doc/current/tut/node10.html#handling
+        return str(self.messages)
+
+class CriticalValidationError(Exception):
+    """
+    Like ValidationError, but a distinct type so callers can treat it
+    separately from ordinary validation failures. Exposes `self.messages`
+    as a list of unicode strings.
+    """
+    def __init__(self, message):
+        "CriticalValidationError can be passed a string or a list."
+        if isinstance(message, list):
+            self.messages = [force_unicode(msg) for msg in message]
+        else:
+            assert isinstance(message, (basestring, Promise)), ("'%s' should be a string" % message)
+            self.messages = [force_unicode(message)]
+
+    def __str__(self):
+        return str(self.messages)
+
+def isAlphaNumeric(field_data, all_data):
+ if not alnum_re.search(field_data):
+ raise ValidationError, _("This value must contain only letters, numbers and underscores.")
+
+def isAlphaNumericURL(field_data, all_data):
+ if not alnumurl_re.search(field_data):
+ raise ValidationError, _("This value must contain only letters, numbers, underscores, dashes or slashes.")
+
+def isSlug(field_data, all_data):
+ if not slug_re.search(field_data):
+ raise ValidationError, _("This value must contain only letters, numbers, underscores or hyphens.")
+
+def isLowerCase(field_data, all_data):
+ if field_data.lower() != field_data:
+ raise ValidationError, _("Uppercase letters are not allowed here.")
+
+def isUpperCase(field_data, all_data):
+ if field_data.upper() != field_data:
+ raise ValidationError, _("Lowercase letters are not allowed here.")
+
+def isCommaSeparatedIntegerList(field_data, all_data):
+ for supposed_int in field_data.split(','):
+ try:
+ int(supposed_int)
+ except ValueError:
+ raise ValidationError, _("Enter only digits separated by commas.")
+
+def isCommaSeparatedEmailList(field_data, all_data):
+ """
+ Checks that field_data is a string of e-mail addresses separated by commas.
+ Blank field_data values will not throw a validation error, and whitespace
+ is allowed around the commas.
+ """
+ for supposed_email in field_data.split(','):
+ try:
+ isValidEmail(supposed_email.strip(), '')
+ except ValidationError:
+ raise ValidationError, _("Enter valid e-mail addresses separated by commas.")
+
+def isValidIPAddress4(field_data, all_data):
+ if not ip4_re.search(field_data):
+ raise ValidationError, _("Please enter a valid IP address.")
+
+def isNotEmpty(field_data, all_data):
+ if field_data.strip() == '':
+ raise ValidationError, _("Empty values are not allowed here.")
+
+def isOnlyDigits(field_data, all_data):
+ if not field_data.isdigit():
+ raise ValidationError, _("Non-numeric characters aren't allowed here.")
+
+def isNotOnlyDigits(field_data, all_data):
+ if field_data.isdigit():
+ raise ValidationError, _("This value can't be comprised solely of digits.")
+
+def isInteger(field_data, all_data):
+ # This differs from isOnlyDigits because this accepts the negative sign
+ if not integer_re.search(field_data):
+ raise ValidationError, _("Enter a whole number.")
+
+def isOnlyLetters(field_data, all_data):
+ if not field_data.isalpha():
+ raise ValidationError, _("Only alphabetical characters are allowed here.")
+
+def _isValidDate(date_string):
+ """
+ A helper function used by isValidANSIDate and isValidANSIDatetime to
+ check if the date is valid. The date string is assumed to already be in
+ YYYY-MM-DD format.
+ """
+ from datetime import date
+ # Could use time.strptime here and catch errors, but datetime.date below
+ # produces much friendlier error messages.
+ year, month, day = map(int, date_string.split('-'))
+ try:
+ date(year, month, day)
+ except ValueError, e:
+ msg = _('Invalid date: %s') % _(str(e))
+ raise ValidationError, msg
+
+def isValidANSIDate(field_data, all_data):
+ if not ansi_date_re.search(field_data):
+ raise ValidationError, _('Enter a valid date in YYYY-MM-DD format.')
+ _isValidDate(field_data)
+
+def isValidANSITime(field_data, all_data):
+ if not ansi_time_re.search(field_data):
+ raise ValidationError, _('Enter a valid time in HH:MM format.')
+
+def isValidANSIDatetime(field_data, all_data):
+ if not ansi_datetime_re.search(field_data):
+ raise ValidationError, _('Enter a valid date/time in YYYY-MM-DD HH:MM format.')
+ _isValidDate(field_data.split()[0])
+
+def isValidEmail(field_data, all_data):
+ if not email_re.search(field_data):
+ raise ValidationError, _('Enter a valid e-mail address.')
+
+def isValidImage(field_data, all_data):
+ """
+ Checks that the file-upload field data contains a valid image (GIF, JPG,
+ PNG, possibly others -- whatever the Python Imaging Library supports).
+ """
+ from PIL import Image
+ from cStringIO import StringIO
+ try:
+ content = field_data.read()
+ except TypeError:
+ raise ValidationError, _("No file was submitted. Check the encoding type on the form.")
+ try:
+ # load() is the only method that can spot a truncated JPEG,
+ # but it cannot be called sanely after verify()
+ trial_image = Image.open(StringIO(content))
+ trial_image.load()
+ # verify() is the only method that can spot a corrupt PNG,
+ # but it must be called immediately after the constructor
+ trial_image = Image.open(StringIO(content))
+ trial_image.verify()
+ except Exception: # Python Imaging Library doesn't recognize it as an image
+ raise ValidationError, _("Upload a valid image. The file you uploaded was either not an image or a corrupted image.")
+
+def isValidImageURL(field_data, all_data):
+ uc = URLMimeTypeCheck(('image/jpeg', 'image/gif', 'image/png'))
+ try:
+ uc(field_data, all_data)
+ except URLMimeTypeCheck.InvalidContentType:
+ raise ValidationError, _("The URL %s does not point to a valid image.") % field_data
+
+def isValidPhone(field_data, all_data):
+ if not phone_re.search(field_data):
+ raise ValidationError, _('Phone numbers must be in XXX-XXX-XXXX format. "%s" is invalid.') % field_data
+
+def isValidQuicktimeVideoURL(field_data, all_data):
+ "Checks that the given URL is a video that can be played by QuickTime (qt, mpeg)"
+ uc = URLMimeTypeCheck(('video/quicktime', 'video/mpeg',))
+ try:
+ uc(field_data, all_data)
+ except URLMimeTypeCheck.InvalidContentType:
+ raise ValidationError, _("The URL %s does not point to a valid QuickTime video.") % field_data
+
+def isValidURL(field_data, all_data):
+ if not url_re.search(field_data):
+ raise ValidationError, _("A valid URL is required.")
+
+def isValidHTML(field_data, all_data):
+ import urllib, urllib2
+ try:
+ u = urllib2.urlopen('http://validator.w3.org/check', urllib.urlencode({'fragment': field_data, 'output': 'xml'}))
+ except:
+ # Validator or Internet connection is unavailable. Fail silently.
+ return
+ html_is_valid = (u.headers.get('x-w3c-validator-status', 'Invalid') == 'Valid')
+ if html_is_valid:
+ return
+ from xml.dom.minidom import parseString
+ error_messages = [e.firstChild.wholeText for e in parseString(u.read()).getElementsByTagName('messages')[0].getElementsByTagName('msg')]
+ raise ValidationError, _("Valid HTML is required. Specific errors are:\n%s") % "\n".join(error_messages)
+
+def isWellFormedXml(field_data, all_data):
+ from xml.dom.minidom import parseString
+ try:
+ parseString(field_data)
+ except Exception, e: # Naked except because we're not sure what will be thrown
+ raise ValidationError, _("Badly formed XML: %s") % str(e)
+
+def isWellFormedXmlFragment(field_data, all_data):
+ isWellFormedXml('<root>%s</root>' % field_data, all_data)
+
+def isExistingURL(field_data, all_data):
+ try:
+ headers = {
+ "Accept" : "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
+ "Accept-Language" : "en-us,en;q=0.5",
+ "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
+ "Connection" : "close",
+ "User-Agent": settings.URL_VALIDATOR_USER_AGENT
+ }
+ req = urllib2.Request(field_data,None, headers)
+ u = urllib2.urlopen(req)
+ except ValueError:
+ raise ValidationError, _("Invalid URL: %s") % field_data
+ except urllib2.HTTPError, e:
+ # 401s are valid; they just mean authorization is required.
+ # 301 and 302 are redirects; they just mean look somewhere else.
+ if str(e.code) not in ('401','301','302'):
+ raise ValidationError, _("The URL %s is a broken link.") % field_data
+ except: # urllib2.URLError, httplib.InvalidURL, etc.
+ raise ValidationError, _("The URL %s is a broken link.") % field_data
+
+def isValidUSState(field_data, all_data):
+ "Checks that the given string is a valid two-letter U.S. state abbreviation"
+ states = ['AA', 'AE', 'AK', 'AL', 'AP', 'AR', 'AS', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'FM', 'GA', 'GU', 'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MH', 'MI', 'MN', 'MO', 'MP', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'PR', 'PW', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI', 'VT', 'WA', 'WI', 'WV', 'WY']
+ if field_data.upper() not in states:
+ raise ValidationError, _("Enter a valid U.S. state abbreviation.")
+
+def hasNoProfanities(field_data, all_data):
+ """
+ Checks that the given string has no profanities in it. This does a simple
+ check for whether each profanity exists within the string, so 'fuck' will
+ catch 'motherfucker' as well. Raises a ValidationError such as:
+ Watch your mouth! The words "f--k" and "s--t" are not allowed here.
+ """
+ field_data = field_data.lower() # normalize
+ words_seen = [w for w in settings.PROFANITIES_LIST if w in field_data]
+ if words_seen:
+ from django.utils.text import get_text_list
+ plural = len(words_seen)
+ raise ValidationError, ungettext("Watch your mouth! The word %s is not allowed here.",
+ "Watch your mouth! The words %s are not allowed here.", plural) % \
+ get_text_list(['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1]) for i in words_seen], _('and'))
+
+class AlwaysMatchesOtherField(object):
+ def __init__(self, other_field_name, error_message=None):
+ self.other = other_field_name
+ self.error_message = error_message or lazy_inter(ugettext_lazy("This field must match the '%s' field."), self.other)
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ if field_data != all_data[self.other]:
+ raise ValidationError, self.error_message
+
+class ValidateIfOtherFieldEquals(object):
+ def __init__(self, other_field, other_value, validator_list):
+ self.other_field, self.other_value = other_field, other_value
+ self.validator_list = validator_list
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ if self.other_field in all_data and all_data[self.other_field] == self.other_value:
+ for v in self.validator_list:
+ v(field_data, all_data)
+
+class RequiredIfOtherFieldNotGiven(object):
+ def __init__(self, other_field_name, error_message=ugettext_lazy("Please enter something for at least one field.")):
+ self.other, self.error_message = other_field_name, error_message
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ if not all_data.get(self.other, False) and not field_data:
+ raise ValidationError, self.error_message
+
+class RequiredIfOtherFieldsGiven(object):
+ def __init__(self, other_field_names, error_message=ugettext_lazy("Please enter both fields or leave them both empty.")):
+ self.other, self.error_message = other_field_names, error_message
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ for field in self.other:
+ if all_data.get(field, False) and not field_data:
+ raise ValidationError, self.error_message
+
+class RequiredIfOtherFieldGiven(RequiredIfOtherFieldsGiven):
+ "Like RequiredIfOtherFieldsGiven, but takes a single field name instead of a list."
+ def __init__(self, other_field_name, error_message=ugettext_lazy("Please enter both fields or leave them both empty.")):
+ RequiredIfOtherFieldsGiven.__init__(self, [other_field_name], error_message)
+
+class RequiredIfOtherFieldEquals(object):
+ def __init__(self, other_field, other_value, error_message=None, other_label=None):
+ self.other_field = other_field
+ self.other_value = other_value
+ other_label = other_label or other_value
+ self.error_message = error_message or lazy_inter(ugettext_lazy("This field must be given if %(field)s is %(value)s"), {
+ 'field': other_field, 'value': other_label})
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ if self.other_field in all_data and all_data[self.other_field] == self.other_value and not field_data:
+ raise ValidationError(self.error_message)
+
+class RequiredIfOtherFieldDoesNotEqual(object):
+ def __init__(self, other_field, other_value, other_label=None, error_message=None):
+ self.other_field = other_field
+ self.other_value = other_value
+ other_label = other_label or other_value
+ self.error_message = error_message or lazy_inter(ugettext_lazy("This field must be given if %(field)s is not %(value)s"), {
+ 'field': other_field, 'value': other_label})
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ if self.other_field in all_data and all_data[self.other_field] != self.other_value and not field_data:
+ raise ValidationError(self.error_message)
+
+class IsLessThanOtherField(object):
+ def __init__(self, other_field_name, error_message):
+ self.other, self.error_message = other_field_name, error_message
+
+ def __call__(self, field_data, all_data):
+ if field_data > all_data[self.other]:
+ raise ValidationError, self.error_message
+
+class UniqueAmongstFieldsWithPrefix(object):
+ def __init__(self, field_name, prefix, error_message):
+ self.field_name, self.prefix = field_name, prefix
+ self.error_message = error_message or ugettext_lazy("Duplicate values are not allowed.")
+
+ def __call__(self, field_data, all_data):
+ for field_name, value in all_data.items():
+ if field_name != self.field_name and value == field_data:
+ raise ValidationError, self.error_message
+
+class NumberIsInRange(object):
+ """
+ Validator that tests if a value is in a range (inclusive).
+ """
+ def __init__(self, lower=None, upper=None, error_message=''):
+ self.lower, self.upper = lower, upper
+ if not error_message:
+ if lower and upper:
+ self.error_message = _("This value must be between %(lower)s and %(upper)s.") % {'lower': lower, 'upper': upper}
+ elif lower:
+ self.error_message = _("This value must be at least %s.") % lower
+ elif upper:
+ self.error_message = _("This value must be no more than %s.") % upper
+ else:
+ self.error_message = error_message
+
+ def __call__(self, field_data, all_data):
+ # Try to make the value numeric. If this fails, we assume another
+ # validator will catch the problem.
+ try:
+ val = float(field_data)
+ except ValueError:
+ return
+
+ # Now validate
+ if self.lower and self.upper and (val < self.lower or val > self.upper):
+ raise ValidationError(self.error_message)
+ elif self.lower and val < self.lower:
+ raise ValidationError(self.error_message)
+ elif self.upper and val > self.upper:
+ raise ValidationError(self.error_message)
+
+class IsAPowerOf(object):
+ """
+ Usage: If you create an instance of the IsPowerOf validator:
+ v = IsAPowerOf(2)
+
+ The following calls will succeed:
+ v(4, None)
+ v(8, None)
+ v(16, None)
+
+ But this call:
+ v(17, None)
+ will raise "django.core.validators.ValidationError: ['This value must be a power of 2.']"
+ """
+ def __init__(self, power_of):
+ self.power_of = power_of
+
+ def __call__(self, field_data, all_data):
+ from math import log
+ val = log(int(field_data)) / log(self.power_of)
+ if val != int(val):
+ raise ValidationError, _("This value must be a power of %s.") % self.power_of
+
+class IsValidDecimal(object):
+ def __init__(self, max_digits, decimal_places):
+ self.max_digits, self.decimal_places = max_digits, decimal_places
+
+ def __call__(self, field_data, all_data):
+ try:
+ val = Decimal(field_data)
+ except DecimalException:
+ raise ValidationError, _("Please enter a valid decimal number.")
+
+ pieces = str(val).lstrip("-").split('.')
+ decimals = (len(pieces) == 2) and len(pieces[1]) or 0
+ digits = len(pieces[0])
+
+ if digits + decimals > self.max_digits:
+ raise ValidationError, ungettext("Please enter a valid decimal number with at most %s total digit.",
+ "Please enter a valid decimal number with at most %s total digits.", self.max_digits) % self.max_digits
+ if digits > (self.max_digits - self.decimal_places):
+ raise ValidationError, ungettext( "Please enter a valid decimal number with a whole part of at most %s digit.",
+ "Please enter a valid decimal number with a whole part of at most %s digits.", str(self.max_digits-self.decimal_places)) % str(self.max_digits-self.decimal_places)
+ if decimals > self.decimal_places:
+ raise ValidationError, ungettext("Please enter a valid decimal number with at most %s decimal place.",
+ "Please enter a valid decimal number with at most %s decimal places.", self.decimal_places) % self.decimal_places
+
+def isValidFloat(field_data, all_data):
+ data = smart_str(field_data)
+ try:
+ float(data)
+ except ValueError:
+ raise ValidationError, _("Please enter a valid floating point number.")
+
+class HasAllowableSize(object):
+ """
+ Checks that the file-upload field data is a certain size. min_size and
+ max_size are measurements in bytes.
+ """
+ def __init__(self, min_size=None, max_size=None, min_error_message=None, max_error_message=None):
+ self.min_size, self.max_size = min_size, max_size
+ self.min_error_message = min_error_message or lazy_inter(ugettext_lazy("Make sure your uploaded file is at least %s bytes big."), min_size)
+ self.max_error_message = max_error_message or lazy_inter(ugettext_lazy("Make sure your uploaded file is at most %s bytes big."), max_size)
+
+ def __call__(self, field_data, all_data):
+ try:
+ content = field_data.read()
+ except TypeError:
+ raise ValidationError, ugettext_lazy("No file was submitted. Check the encoding type on the form.")
+ if self.min_size is not None and len(content) < self.min_size:
+ raise ValidationError, self.min_error_message
+ if self.max_size is not None and len(content) > self.max_size:
+ raise ValidationError, self.max_error_message
+
+class MatchesRegularExpression(object):
+ """
+ Checks that the field matches the given regular-expression. The regex
+ should be in string format, not already compiled.
+ """
+ def __init__(self, regexp, error_message=ugettext_lazy("The format for this field is wrong.")):
+ self.regexp = re.compile(regexp)
+ self.error_message = error_message
+
+ def __call__(self, field_data, all_data):
+ if not self.regexp.search(field_data):
+ raise ValidationError(self.error_message)
+
+class AnyValidator(object):
+ """
+ This validator tries all given validators. If any one of them succeeds,
+ validation passes. If none of them succeeds, the given message is thrown
+ as a validation error. The message is rather unspecific, so it's best to
+ specify one on instantiation.
+ """
+ def __init__(self, validator_list=None, error_message=ugettext_lazy("This field is invalid.")):
+ if validator_list is None: validator_list = []
+ self.validator_list = validator_list
+ self.error_message = error_message
+ for v in validator_list:
+ if hasattr(v, 'always_test'):
+ self.always_test = True
+
+ def __call__(self, field_data, all_data):
+ for v in self.validator_list:
+ try:
+ v(field_data, all_data)
+ return
+ except ValidationError, e:
+ pass
+ raise ValidationError(self.error_message)
+
+class URLMimeTypeCheck(object):
+ "Checks that the provided URL points to a document with a listed mime type"
+ class CouldNotRetrieve(ValidationError):
+ pass
+ class InvalidContentType(ValidationError):
+ pass
+
+ def __init__(self, mime_type_list):
+ self.mime_type_list = mime_type_list
+
+ def __call__(self, field_data, all_data):
+ import urllib2
+ try:
+ isValidURL(field_data, all_data)
+ except ValidationError:
+ raise
+ try:
+ info = urllib2.urlopen(field_data).info()
+ except (urllib2.HTTPError, urllib2.URLError):
+ raise URLMimeTypeCheck.CouldNotRetrieve, _("Could not retrieve anything from %s.") % field_data
+ content_type = info['content-type']
+ if content_type not in self.mime_type_list:
+ raise URLMimeTypeCheck.InvalidContentType, _("The URL %(url)s returned the invalid Content-Type header '%(contenttype)s'.") % {
+ 'url': field_data, 'contenttype': content_type}
+
+class RelaxNGCompact(object):
+ "Validate against a Relax NG compact schema"
+ def __init__(self, schema_path, additional_root_element=None):
+ self.schema_path = schema_path
+ self.additional_root_element = additional_root_element
+
+ def __call__(self, field_data, all_data):
+ import os, tempfile
+ if self.additional_root_element:
+ field_data = '<%(are)s>%(data)s\n</%(are)s>' % {
+ 'are': self.additional_root_element,
+ 'data': field_data
+ }
+ filename = tempfile.mktemp() # Insecure, but nothing else worked
+ fp = open(filename, 'w')
+ fp.write(field_data)
+ fp.close()
+ if not os.path.exists(settings.JING_PATH):
+ raise Exception, "%s not found!" % settings.JING_PATH
+ p = os.popen('%s -c %s %s' % (settings.JING_PATH, self.schema_path, filename))
+ errors = [line.strip() for line in p.readlines()]
+ p.close()
+ os.unlink(filename)
+ display_errors = []
+ lines = field_data.split('\n')
+ for error in errors:
+ ignored, line, level, message = error.split(':', 3)
+ # Scrape the Jing error messages to reword them more nicely.
+ m = re.search(r'Expected "(.*?)" to terminate element starting on line (\d+)', message)
+ if m:
+ display_errors.append(_('Please close the unclosed %(tag)s tag from line %(line)s. (Line starts with "%(start)s".)') % \
+ {'tag':m.group(1).replace('/', ''), 'line':m.group(2), 'start':lines[int(m.group(2)) - 1][:30]})
+ continue
+ if message.strip() == 'text not allowed here':
+ display_errors.append(_('Some text starting on line %(line)s is not allowed in that context. (Line starts with "%(start)s".)') % \
+ {'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ m = re.search(r'\s*attribute "(.*?)" not allowed at this point; ignored', message)
+ if m:
+ display_errors.append(_('"%(attr)s" on line %(line)s is an invalid attribute. (Line starts with "%(start)s".)') % \
+ {'attr':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ m = re.search(r'\s*unknown element "(.*?)"', message)
+ if m:
+ display_errors.append(_('"<%(tag)s>" on line %(line)s is an invalid tag. (Line starts with "%(start)s".)') % \
+ {'tag':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ if message.strip() == 'required attributes missing':
+ display_errors.append(_('A tag on line %(line)s is missing one or more required attributes. (Line starts with "%(start)s".)') % \
+ {'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ m = re.search(r'\s*bad value for attribute "(.*?)"', message)
+ if m:
+ display_errors.append(_('The "%(attr)s" attribute on line %(line)s has an invalid value. (Line starts with "%(start)s".)') % \
+ {'attr':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ # Failing all those checks, use the default error message.
+ display_error = 'Line %s: %s [%s]' % (line, message, level.strip())
+ display_errors.append(display_error)
+ if len(display_errors) > 0:
+ raise ValidationError, display_errors
diff --git a/webapp/django/core/xheaders.py b/webapp/django/core/xheaders.py
new file mode 100644
index 0000000000..34335f12c5
--- /dev/null
+++ b/webapp/django/core/xheaders.py
@@ -0,0 +1,24 @@
+"""
+Pages in Django are served up with custom HTTP headers containing useful
+information about those pages -- namely, the content type and object ID.
+
+This module contains utility functions for retrieving and doing interesting
+things with these special "X-Headers" (so called because the HTTP spec demands
+that custom headers are prefixed with "X-").
+
+Next time you're at slashdot.org, watch out for X-Fry and X-Bender. :)
+"""
+
+def populate_xheaders(request, response, model, object_id):
+ """
+ Adds the "X-Object-Type" and "X-Object-Id" headers to the given
+ HttpResponse according to the given model and object_id -- but only if the
+ given HttpRequest object has an IP address within the INTERNAL_IPS setting
+ or if the request is from a logged in staff member.
+ """
+ from django.conf import settings
+ if (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
+ or (hasattr(request, 'user') and request.user.is_authenticated()
+ and request.user.is_staff)):
+ response['X-Object-Type'] = "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
+ response['X-Object-Id'] = str(object_id)