diff --git a/djangoappengine/boot.py b/djangoappengine/boot.py index c0b74b9..04319e5 100644 --- a/djangoappengine/boot.py +++ b/djangoappengine/boot.py @@ -2,6 +2,13 @@ import os import sys +global devappserver_ver +try: + import force_devappserver + devappserver_ver = force_devappserver.devappserver_ver +except: + devappserver_ver = 1 + def find_project_dir(): """ Go through the path, and look for manage.py @@ -70,6 +77,10 @@ def setup_env(): # Then call fix_sys_path from the SDK from dev_appserver import fix_sys_path + if devappserver_ver == 2: + # emulate dev_appserver._run_file in devappserver2 + from dev_appserver import _SYS_PATH_ADDITIONS + sys.path = _SYS_PATH_ADDITIONS['_python_runtime.py'] + sys.path fix_sys_path() setup_project() @@ -146,41 +157,50 @@ def setup_project(): # enable https connections (seem to be broken on Windows because # the _ssl module is disallowed). if not have_appserver: - from google.appengine.tools import dev_appserver - try: - # Backup os.environ. It gets overwritten by the - # dev_appserver, but it's needed by the subprocess module. - env = dev_appserver.DEFAULT_ENV - dev_appserver.DEFAULT_ENV = os.environ.copy() - dev_appserver.DEFAULT_ENV.update(env) - # Backup the buffer() builtin. The subprocess in Python 2.5 - # on Linux and OS X uses needs it, but the dev_appserver - # removes it. - dev_appserver.buffer = buffer - except AttributeError: - logging.warn("Could not patch the default environment. " - "The subprocess module will not work correctly.") - - try: - # Allow importing compiler/parser, _ssl (for https), - # _io for Python 2.7 io support on OS X - dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend( - ('parser', '_ssl', '_io')) - except AttributeError: - logging.warn("Could not patch modules whitelist. the compiler " - "and parser modules will not work and SSL support " - "is disabled.") - elif not on_production_server: - try: - # Restore the real subprocess module. 
- from google.appengine.api.mail_stub import subprocess - sys.modules['subprocess'] = subprocess - # Re-inject the buffer() builtin into the subprocess module. + if devappserver_ver == 1: from google.appengine.tools import dev_appserver - subprocess.buffer = dev_appserver.buffer - except Exception, e: - logging.warn("Could not add the subprocess module to the " - "sandbox: %s" % e) + try: + # Backup os.environ. It gets overwritten by the + # dev_appserver, but it's needed by the subprocess module. + env = dev_appserver.DEFAULT_ENV + dev_appserver.DEFAULT_ENV = os.environ.copy() + dev_appserver.DEFAULT_ENV.update(env) + # Backup the buffer() builtin. The subprocess in Python 2.5 + # on Linux and OS X uses needs it, but the dev_appserver + # removes it. + dev_appserver.buffer = buffer + except AttributeError: + logging.warn("Could not patch the default environment. " + "The subprocess module will not work correctly.") + + try: + # Allow importing compiler/parser, _ssl (for https), + # _io for Python 2.7 io support on OS X + dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend( + ('parser', '_ssl', '_io')) + except AttributeError: + logging.warn("Could not patch modules whitelist. the compiler " + "and parser modules will not work and SSL support " + "is disabled.") + # In SDK 1.6.4, the datastore doesn't save automatically on exit. + # Register a handler to make sure we save. This is important on + # manage.py commands other than 'runserver'. Note that with runserver, + # the datastore is flushed twice. This should be acceptable. + import atexit + if hasattr(dev_appserver, 'TearDownStubs'): + atexit.register(dev_appserver.TearDownStubs) + elif not on_production_server: + if devappserver_ver == 1: + try: + # Restore the real subprocess module. + from google.appengine.tools import dev_appserver + from google.appengine.api.mail_stub import subprocess + sys.modules['subprocess'] = subprocess + # Re-inject the buffer() builtin into the subprocess module. 
+ subprocess.buffer = dev_appserver.buffer + except Exception, e: + logging.warn("Could not add the subprocess module to the " + "sandbox: %s" % e) os.environ.update(env_ext) @@ -202,3 +222,4 @@ def setup_project(): while path in sys.path: sys.path.remove(path) sys.path = extra_paths + sys.path + diff --git a/djangoappengine/db/base.py b/djangoappengine/db/base.py index ce5179e..c8c9e84 100644 --- a/djangoappengine/db/base.py +++ b/djangoappengine/db/base.py @@ -155,7 +155,8 @@ def _value_for_db(self, value, field, field_kind, db_type, lookup): if db_type == 'key': # value = self._value_for_db_key(value, field_kind) try: - value = key_from_path(field.model._meta.db_table, value) + if not isinstance(value, Key): + value = key_from_path(field.model._meta.db_table, value) except (BadArgumentError, BadValueError,): raise DatabaseError("Only strings and positive integers " "may be used as keys on GAE.") @@ -169,9 +170,9 @@ def _value_for_db(self, value, field, field_kind, db_type, lookup): # Store all date / time values as datetimes, by using some # default time or date. - elif db_type == 'date': + elif db_type == 'date' and isinstance(value, datetime.date): value = datetime.datetime.combine(value, self.DEFAULT_TIME) - elif db_type == 'time': + elif db_type == 'time' and isinstance(value, datetime.time): value = datetime.datetime.combine(self.DEFAULT_DATE, value) # Store BlobField, DictField and EmbeddedModelField values as Blobs. @@ -207,9 +208,9 @@ def _value_from_db(self, value, field, field_kind, db_type): value = unicode(value) # Dates and times are stored as datetimes, drop the added part. - elif db_type == 'date': + elif db_type == 'date' and isinstance(value, datetime.datetime): value = value.date() - elif db_type == 'time': + elif db_type == 'time' and isinstance(value, datetime.datetime): value = value.time() # Convert GAE Blobs to plain strings for Django. 
diff --git a/djangoappengine/db/compiler.py b/djangoappengine/db/compiler.py index cad752d..7b340bc 100644 --- a/djangoappengine/db/compiler.py +++ b/djangoappengine/db/compiler.py @@ -23,7 +23,7 @@ from .db_settings import get_model_indexes from .expressions import ExpressionEvaluator -from .utils import commit_locked +from .utils import AncestorKey, commit_locked # Valid query types (a dictionary is used for speedy lookups). @@ -87,6 +87,7 @@ def __init__(self, compiler, fields): self.included_pks = None self.excluded_pks = () self.has_negated_exact_filter = False + self.ancestor_key = None self.ordering = [] self.db_table = self.query.get_meta().db_table self.pks_only = (len(fields) == 1 and fields[0].primary_key) @@ -197,6 +198,14 @@ def add_filter(self, field, lookup_type, negated, value): # Optimization: batch-get by key; this is only suitable for # primary keys, not for anything that uses the key type. if field.primary_key and lookup_type in ('exact', 'in'): + if lookup_type == 'exact' and isinstance(value, AncestorKey): + if negated: + raise DatabaseError("You can't negate an ancestor operator.") + if self.ancestor_key is not None: + raise DatabaseError("You can't use more than one ancestor operator.") + self.ancestor_key = value.key + return + if self.included_pks is not None: raise DatabaseError("You can't apply multiple AND " "filters on the primary key. " @@ -318,6 +327,8 @@ def _make_entity(self, entity): def _build_query(self): for query in self.gae_query: query.Order(*self.ordering) + if self.ancestor_key: + query.Ancestor(self.ancestor_key) if len(self.gae_query) > 1: return MultiQuery(self.gae_query, self.ordering) return self.gae_query[0] @@ -403,6 +414,7 @@ def insert(self, data_list, return_id=False): if value is not None: kwds['id'] = value.id() kwds['name'] = value.name() + kwds['parent'] = value.parent() # GAE does not store empty lists (and even does not allow # passing empty lists to Entity.update) so skip them. 
diff --git a/djangoappengine/db/creation.py b/djangoappengine/db/creation.py index a6071ff..17d504c 100644 --- a/djangoappengine/db/creation.py +++ b/djangoappengine/db/creation.py @@ -32,6 +32,13 @@ def db_type(self, field): field is to be indexed, and the "text" db_type (db.Text) if it's registered as unindexed. """ + from djangoappengine.fields import DbKeyField + + # DBKeyField reads/stores db.Key objects directly + # so its treated as a special case + if isinstance(field, DbKeyField): + return field.db_type(connection=self.connection) + if self.connection.settings_dict.get('STORE_RELATIONS_AS_DB_KEYS'): if field.primary_key or field.rel is not None: return 'key' diff --git a/djangoappengine/db/models/__init__.py b/djangoappengine/db/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/djangoappengine/db/models/manager.py b/djangoappengine/db/models/manager.py new file mode 100644 index 0000000..5b5e732 --- /dev/null +++ b/djangoappengine/db/models/manager.py @@ -0,0 +1,14 @@ +from django.db.models import Manager as _baseManager +from djangoappengine.db.utils import as_ancestor +from djangoappengine.db.models.query import QuerySet + +class Manager(_baseManager): + + def get_query_set(self): + """Returns a new QuerySet object. Subclasses can override this method + to easily customize the behavior of the Manager. + """ + return QuerySet(self.model, using=self._db) + + def ancestor(self, ancestor): + return self.get_query_set().ancestor(ancestor) \ No newline at end of file diff --git a/djangoappengine/db/models/query.py b/djangoappengine/db/models/query.py new file mode 100644 index 0000000..d81c661 --- /dev/null +++ b/djangoappengine/db/models/query.py @@ -0,0 +1,17 @@ +from django.db.models.query import QuerySet as _baseQuerySet +from djangoappengine.db.utils import as_ancestor + +class QuerySet(_baseQuerySet): + def ancestor(self, ancestor): + """ + Returns a new QuerySet instance with the args ANDed to the existing + set. 
+ """ + return self._filter_or_exclude(False, pk=as_ancestor(ancestor)) + +class EmptyQuerySet(QuerySet): + def ancestor(self, *args, **kwargs): + """ + Always returns EmptyQuerySet. + """ + return self \ No newline at end of file diff --git a/djangoappengine/db/stubs.py b/djangoappengine/db/stubs.py index 9180e43..02583fc 100644 --- a/djangoappengine/db/stubs.py +++ b/djangoappengine/db/stubs.py @@ -3,7 +3,7 @@ import time from urllib2 import HTTPError, URLError -from ..boot import PROJECT_DIR +from ..boot import PROJECT_DIR, devappserver_ver from ..utils import appid, have_appserver @@ -62,9 +62,13 @@ def activate_test_stubs(self, connection): self.testbed.init_memcache_stub() self.testbed.init_taskqueue_stub(auto_task_running=True, root_path=PROJECT_DIR) self.testbed.init_urlfetch_stub() - self.testbed.init_user_stub() + self.testbed.init_user_stub(True, **{ 'logout_url' : '/_ah/login?continue=%s&action=Logout', + 'login_url' : '/_ah/login?continue=%s' }) self.testbed.init_xmpp_stub() self.testbed.init_channel_stub() + self.testbed.init_files_stub(True) + self.testbed.init_blobstore_stub(True) + self.testbed.init_images_stub(True) def deactivate_test_stubs(self): if self.active_stubs == 'test': @@ -82,9 +86,17 @@ def setup_local_stubs(self, connection): log_level = logging.getLogger().getEffectiveLevel() logging.getLogger().setLevel(logging.WARNING) from google.appengine.tools import dev_appserver - dev_appserver.SetupStubs('dev~' + appid, **args) + dev_appserver.SetupStubs('dev~' + appid, + _use_atexit_for_datastore_stub=True, + **args) logging.getLogger().setLevel(log_level) self.active_stubs = 'local' + if devappserver_ver == 2: + # Mimic google.appengine.tools.devappserver2 + os.environ['TZ'] = 'UTC' + if hasattr(time, 'tzset'): + # time.tzet() should be called on Unix, but doesn't exist on Windows. 
+ time.tzset() def setup_remote_stubs(self, connection): if self.active_stubs == 'remote': diff --git a/djangoappengine/db/utils.py b/djangoappengine/db/utils.py index 1865f9c..08e1b5f 100644 --- a/djangoappengine/db/utils.py +++ b/djangoappengine/db/utils.py @@ -1,14 +1,17 @@ -from django.db import DEFAULT_DB_ALIAS - +from google.appengine.api.datastore import Key from google.appengine.datastore.datastore_query import Cursor +from django.db import models, DEFAULT_DB_ALIAS try: from functools import wraps except ImportError: from django.utils.functional import wraps # Python 2.3, 2.4 fallback. + class CursorQueryMixin(object): + def clone(self, *args, **kwargs): + kwargs['_gae_cursor'] = getattr(self, '_gae_cursor', None) kwargs['_gae_start_cursor'] = getattr(self, '_gae_start_cursor', None) kwargs['_gae_end_cursor'] = getattr(self, '_gae_end_cursor', None) kwargs['_gae_config'] = getattr(self, '_gae_config', None) @@ -90,3 +93,36 @@ def _commit_locked(*args, **kw): if callable(func_or_using): return inner_commit_locked(func_or_using, DEFAULT_DB_ALIAS) return lambda func: inner_commit_locked(func, func_or_using) + +class AncestorKey(object): + def __init__(self, key): + self.key = key + +def as_ancestor(key_or_model): + if key_or_model is None: + raise ValueError("key_or_model must not be None") + + if isinstance(key_or_model, models.Model): + key_or_model = Key.from_path(key_or_model._meta.db_table, key_or_model.pk) + + return AncestorKey(key_or_model) + +def make_key(*args, **kwargs): + parent = kwargs.pop('parent', None) + + if kwargs: + raise AssertionError('Excess keyword arguments; received %s' % kwargs) + + if not args or len(args) % 2: + raise AssertionError('A non-zero even number of positional arguments is required; received %s' % args) + + if isinstance(parent, models.Model): + parent = Key.from_path(parent._meta.db_table, parent.pk) + + converted_args = [] + for i in xrange(0, len(args), 2): + model, id_or_name = args[i:i+2] + 
converted_args.extend((model._meta.db_table, id_or_name)) + + newkwargs = { 'parent' : parent } + return Key.from_path(*converted_args, **newkwargs) diff --git a/djangoappengine/fields.py b/djangoappengine/fields.py new file mode 100644 index 0000000..54197d5 --- /dev/null +++ b/djangoappengine/fields.py @@ -0,0 +1,87 @@ +from django.core.exceptions import ValidationError +from django.db import models +from django.utils.encoding import smart_unicode + +from djangoappengine.db.utils import AncestorKey + +from google.appengine.api.datastore import Key, datastore_errors + +import logging + +class DbKeyField(models.Field): + description = "A field for native database key objects" + __metaclass__ = models.SubfieldBase + + def __init__(self, *args, **kwargs): + kwargs['blank'] = True + + self.parent_key_attname = kwargs.pop('parent_key_name', None) + + if self.parent_key_attname is not None and kwargs.get('primary_key', None) is None: + raise ValueError("Primary key must be true to use parent_key_name") + + super(DbKeyField, self).__init__(*args, **kwargs) + + def contribute_to_class(self, cls, name): + if self.primary_key: + assert not cls._meta.has_auto_field, "A model can't have more than one auto field." 
+ cls._meta.has_auto_field = True + cls._meta.auto_field = self + + if self.parent_key_attname is not None: + def get_parent_key(instance, instance_type=None): + if instance is None: + return self + + return instance.__dict__.get(self.parent_key_attname) + + def set_parent_key(instance, value): + if instance is None: + raise AttributeError("Attribute must be accessed via instance") + + if not isinstance(value, Key): + raise ValueError("'%s' must be a Key" % self.parent_key_attname) + + instance.__dict__[self.parent_key_attname] = value + + setattr(cls, self.parent_key_attname, property(get_parent_key, set_parent_key)) + + super(DbKeyField, self).contribute_to_class(cls, name) + + def to_python(self, value): + if value is None: + return None + if isinstance(value, Key): + return value + if isinstance(value, basestring): + if len(value) == 0: + return None + + try: + return Key(encoded=value) + except datastore_errors.BadKeyError: + return Key.from_path(self.model._meta.db_table, long(value)) + if isinstance(value, (int, long)): + return Key.from_path(self.model._meta.db_table, value) + + raise ValidationError("DbKeyField does not accept %s" % type(value)) + + def get_prep_value(self, value): + if isinstance(value, AncestorKey): + return value + return self.to_python(value) + + def pre_save(self, model_instance, add): + value = super(DbKeyField, self).pre_save(model_instance, add) + + if add and value is None and self.parent_key_attname is not None and hasattr(model_instance, self.parent_key_attname): + stashed_parent = getattr(model_instance, self.parent_key_attname) + value = Key.from_path(self.model._meta.db_table, 0, parent=stashed_parent) + + return value + + def formfield(self, **kwargs): + return None + + def value_to_string(self, obj): + return smart_unicode(self._get_val_from_obj(obj)) diff --git a/djangoappengine/main/main.py b/djangoappengine/main/main.py index 9ee5251..9d92334 100644 --- a/djangoappengine/main/main.py +++ b/djangoappengine/main/main.py @@ 
-71,7 +71,14 @@ def make_profileable(func): return lambda: profile_main(func) return func + +import webapp2 + +app = webapp2.WSGIApplication([('/', application)]) + +''' main = make_profileable(real_main) if __name__ == '__main__': main() +''' diff --git a/djangoappengine/management/commands/deploy.py b/djangoappengine/management/commands/deploy.py index dcc250e..303502f 100644 --- a/djangoappengine/management/commands/deploy.py +++ b/djangoappengine/management/commands/deploy.py @@ -29,8 +29,6 @@ def run_appcfg(argv): new_args = argv[:] new_args[1] = 'update' - if appconfig.runtime != 'python': - new_args.insert(1, '-R') new_args.append(PROJECT_DIR) syncdb = True if '--nosyncdb' in new_args: diff --git a/djangoappengine/management/commands/loaddata.py b/djangoappengine/management/commands/loaddata.py new file mode 100644 index 0000000..8b7285d --- /dev/null +++ b/djangoappengine/management/commands/loaddata.py @@ -0,0 +1,25 @@ +from django.core.management.commands.loaddata import Command as OriginalCommand + +from google.appengine.api import apiproxy_stub_map +from google.appengine.datastore import datastore_stub_util + +class Command(OriginalCommand): + def handle(self, *fixture_labels, **options): + retval = None + # Temporarily change consistency policy to force apply loaded data + datastore = apiproxy_stub_map.apiproxy.GetStub('datastore_v3') + + try: + # datastore._consistency_policy only exists in dev_appserver + # will throw exception in production + orig_consistency_policy = datastore._consistency_policy + datastore.SetConsistencyPolicy(datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1)) + except: + orig_consistency_policy = None + + retval = super(Command, self).handle(*fixture_labels, **options) + + if orig_consistency_policy: + datastore.SetConsistencyPolicy(orig_consistency_policy) + + return retval diff --git a/djangoappengine/management/commands/runserver.py b/djangoappengine/management/commands/runserver.py index cb60166..4765074 
100644 --- a/djangoappengine/management/commands/runserver.py +++ b/djangoappengine/management/commands/runserver.py @@ -5,9 +5,20 @@ from django.db import connections from django.core.management.base import BaseCommand from django.core.management.commands.runserver import BaseRunserverCommand -from django.core.exceptions import ImproperlyConfigured -from google.appengine.tools import dev_appserver_main +from ...boot import devappserver_ver + +if devappserver_ver == 1: + from google.appengine.tools import dev_appserver_main +elif devappserver_ver == 2: + import os + import google + sys.argv[0] = os.path.join( + os.path.dirname(os.path.dirname(google.__file__)), + "devappserver2.py") + # The following import sets the path for _python_runtime.py from + # sys.argv[0], so we need to hack sys.argv[0] before this import + from google.appengine.tools.devappserver2 import devappserver2 from ...boot import PROJECT_DIR from ...db.base import DatabaseWrapper, get_datastore_paths @@ -24,6 +35,10 @@ class Command(BaseRunserverCommand): """ option_list = BaseCommand.option_list + ( + make_option( + '--auto_id_policy', + help="Dictate how automatic IDs are assigned by the datastore " \ + "stub. 'sequential' or 'scattered'."), make_option( '--debug', action='store_true', default=False, help="Prints verbose debugging messages to the console while " \ @@ -106,7 +121,10 @@ def create_parser(self, prog_name, subcommand): parse the arguments to this command. """ # Hack __main__ so --help in dev_appserver_main works OK. - sys.modules['__main__'] = dev_appserver_main + if devappserver_ver == 1: + sys.modules['__main__'] = dev_appserver_main + else: + sys.modules['__main__'] = devappserver2 return super(Command, self).create_parser(prog_name, subcommand) def run_from_argv(self, argv): @@ -130,7 +148,10 @@ def run(self, *args, **options): args = [] # Set bind ip/port if specified. 
if self.addr: - args.extend(['--address', self.addr]) + if devappserver_ver == 1: + args.extend(['--address', self.addr]) + else: + args.extend(['--host', self.addr]) if self.port: args.extend(['--port', self.port]) @@ -169,17 +190,23 @@ def run(self, *args, **options): break # Process the rest of the options here. - bool_options = [ - 'debug', 'debug_imports', 'clear_datastore', 'require_indexes', - 'high_replication', 'enable_sendmail', 'use_sqlite', - 'allow_skipped_files', 'disable_task_running', ] + if devappserver_ver == 1: + bool_options = [ + 'debug', 'debug_imports', 'clear_datastore', 'require_indexes', + 'high_replication', 'enable_sendmail', 'use_sqlite', + 'allow_skipped_files', 'disable_task_running', ] + else: + bool_options = [ + 'debug', 'debug_imports', 'clear_datastore', 'require_indexes', + 'enable_sendmail', + 'allow_skipped_files', 'disable_task_running', ] for opt in bool_options: if options[opt] != False: args.append('--%s' % opt) str_options = [ 'datastore_path', 'blobstore_path', 'history_path', 'login_url', 'smtp_host', - 'smtp_port', 'smtp_user', 'smtp_password', ] + 'smtp_port', 'smtp_user', 'smtp_password', 'auto_id_policy'] for opt in str_options: if options.get(opt, None) != None: args.extend(['--%s' % opt, options[opt]]) @@ -199,4 +226,15 @@ def run(self, *args, **options): logging.getLogger().setLevel(logging.INFO) # Append the current working directory to the arguments. - dev_appserver_main.main([self.progname] + args + [PROJECT_DIR]) + if devappserver_ver == 1: + dev_appserver_main.main([self.progname] + args + [PROJECT_DIR]) + else: + from google.appengine.api import apiproxy_stub_map + from google.appengine.tools.devappserver2 import devappserver2 + + # Environment is set in djangoappengine.stubs.setup_local_stubs() + # We need to do this effectively reset the stubs. 
+ apiproxy_stub_map.apiproxy = apiproxy_stub_map.GetDefaultAPIProxy() + + sys.argv = ['/home/user/google_appengine/devappserver2.py'] + args + [PROJECT_DIR] + devappserver2.main() diff --git a/djangoappengine/management/commands/test.py b/djangoappengine/management/commands/test.py new file mode 100644 index 0000000..ae12d84 --- /dev/null +++ b/djangoappengine/management/commands/test.py @@ -0,0 +1,52 @@ +from django.core.management.commands.test import Command as OriginalCommand + +from django.test import client + +from google.appengine.api import files +from google.appengine.ext.blobstore import BlobInfo + +original_encode_file = client.encode_file + +def my_encode_file(boundary, key, file): + # encode_file with blobstore support. + # Expecting something like this in the test: + + ''' + from google.appengine.api import files + fn = files.blobstore.create(mime_type="image/jpg", _blobinfo_uploaded_filename="foo.jpg") + with files.open(fn, 'a') as fp: + fp.write("bar") + files.finalize(fn) + + with files.open(fn) as fp: + response = self.client.post('/viewurl', {"fileparam" : fp}) + ''' + if hasattr(file, "_filename"): + blob_key = files.blobstore.get_blob_key(file._filename) + blobinfo = BlobInfo.get(blob_key) + return [ + '--' + boundary, + 'Content-Type: message/external-body; blob-key=%s; access-type="X-AppEngine-BlobKey"' % blob_key, + 'MIME-Version: 1.0', + 'Content-Disposition: form-data; name="%s"; filename="%s"' % (key, blobinfo.filename), + '', + 'Content-Type: %s' % blobinfo.content_type + ] + else: + return original_encode_file(boundary, key, file) + +class Command(OriginalCommand): + def __init__(self): + # Add a non-null version for DEFAULT_VERSION_HOSTNAME + # or else the mapreduce library's handler function + # throws an exception. This only prevents the exception. 
+ import os + default_version_hostname = "mr-test-support.appspot.com" + if "DEFAULT_VERSION_HOSTNAME" not in os.environ: + os.environ["DEFAULT_VERSION_HOSTNAME"] = ( + default_version_hostname) + + # monkey patch client's encode_file with our own + # with blobstore support + client.encode_file = my_encode_file + super(Command, self).__init__() diff --git a/djangoappengine/management/commands/testserver.py b/djangoappengine/management/commands/testserver.py index cc0a8b0..738435f 100644 --- a/djangoappengine/management/commands/testserver.py +++ b/djangoappengine/management/commands/testserver.py @@ -1,8 +1,5 @@ from django.core.management.base import BaseCommand -from google.appengine.api import apiproxy_stub_map -from google.appengine.datastore import datastore_stub_util - from optparse import make_option class Command(BaseCommand): @@ -47,18 +44,9 @@ def handle(self, *fixture_labels, **options): db_name = name break - # Temporarily change consistency policy to force apply loaded data - datastore = apiproxy_stub_map.apiproxy.GetStub('datastore_v3') - - orig_consistency_policy = datastore._consistency_policy - datastore.SetConsistencyPolicy(datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1)) - # Import the fixture data into the test database. call_command('loaddata', *fixture_labels, **{'verbosity': verbosity}) - # reset original policy - datastore.SetConsistencyPolicy(orig_consistency_policy) - # Run the development server. Turn off auto-reloading because it causes # a strange error -- it causes this handle() method to be called # multiple times. 
diff --git a/djangoappengine/mapreduce/input_readers.py b/djangoappengine/mapreduce/input_readers.py index fb2740e..f6b9fbc 100644 --- a/djangoappengine/mapreduce/input_readers.py +++ b/djangoappengine/mapreduce/input_readers.py @@ -1,11 +1,51 @@ from djangoappengine.db.utils import get_cursor, set_cursor, set_config +from django.db.models.sql.query import Query -from google.appengine.api.datastore import Key +from google.appengine.datastore import datastore_query from mapreduce.datastore_range_iterators import AbstractKeyRangeIterator, _KEY_RANGE_ITERATORS -from mapreduce.input_readers import AbstractDatastoreInputReader, _get_params, BadReaderParamsError +from mapreduce.input_readers import AbstractDatastoreInputReader, RawDatastoreInputReader, _get_params, BadReaderParamsError from mapreduce import util +class DjangoKeyIterator(AbstractKeyRangeIterator): + """An iterator that takes a Django model ('app.models.Model') and yields Keys for that model""" + _KEYS_ONLY = True + def __iter__(self): + query = Query(util.for_name(self._query_spec.model_class_path)).get_compiler(using="default").build_query() + raw_entity_kind = query.db_table + + q = self._key_range.make_ascending_datastore_query(raw_entity_kind, keys_only=self._KEYS_ONLY) + if self._cursor: + q = set_cursor(q, self._cursor) + + self._query = q + for key in q.Run( + config=datastore_query.QueryOptions(batch_size=self._query_spec.batch_size)): + yield key + + def _get_cursor(self): + if self._query is not None: + return self._query.cursor() + +_KEY_RANGE_ITERATORS[DjangoKeyIterator.__name__] = DjangoKeyIterator + +class DjangoKeyInputReader(RawDatastoreInputReader): + """An input reader that takes a Django model ('app.models.Model') and yields keys for that model""" + _KEY_RANGE_ITER_CLS = DjangoKeyIterator + + +class DjangoRawEntityIterator(DjangoKeyIterator): + """An iterator that takes a Django model ('app.models.Model') and yields raw entities for that model""" + _KEYS_ONLY = False + 
+_KEY_RANGE_ITERATORS[DjangoRawEntityIterator.__name__] = DjangoRawEntityIterator + + +class DjangoRawEntityInputReader(RawDatastoreInputReader): + """An input reader that takes a Django model ('app.models.Model') and yields raw entities for that model""" + _KEY_RANGE_ITER_CLS = DjangoRawEntityIterator + + class DjangoModelIterator(AbstractKeyRangeIterator): def __iter__(self): k_range = self._key_range diff --git a/djangoappengine/storage.py b/djangoappengine/storage.py index 030b285..1511368 100644 --- a/djangoappengine/storage.py +++ b/djangoappengine/storage.py @@ -135,6 +135,12 @@ def __init__(self, name, mode, storage): self._mode = mode self.blobstore_info = storage._get_blobinfo(name) + def __str__(self): + return self.blobstore_info.filename + + def __unicode__(self): + return self.blobstore_info.filename + @property def size(self): return self.blobstore_info.size @@ -148,6 +154,13 @@ def file(self): self._file = BlobReader(self.blobstore_info.key()) return self._file + @property + def url(self): + try: + return get_serving_url(self.blobstore_info.key()) + except NotImageError: + return None + class BlobstoreFileUploadHandler(FileUploadHandler): """ @@ -205,3 +218,10 @@ def chunks(self, chunk_size=1024 * 128): def multiple_chunks(self, chunk_size=1024 * 128): return True + + @property + def url(self): + try: + return get_serving_url(self.blobstore_info.key()) + except NotImageError: + return None diff --git a/djangoappengine/test.py b/djangoappengine/test.py new file mode 100644 index 0000000..2370368 --- /dev/null +++ b/djangoappengine/test.py @@ -0,0 +1,365 @@ +import os +import errno +import types +import threading +import httplib +import socket +import select +from django.db import connections +from django.test import TestCase, TransactionTestCase + +from google.appengine.datastore import datastore_stub_util + +from db.stubs import stub_manager +from utils import appid +from unittest.runner import TextTestResult + +from boot import devappserver_ver + 
+class GAETestCase(TestCase): + ''' + This base class configures the dev_appserver datastore to test for eventual consistency behavior. + ''' + def _pre_setup(self): + """Performs any pre-test setup. + * Set the dev_appserver consistency state. + """ + super(GAETestCase,self)._pre_setup() + + if hasattr(self, 'consistency_probability'): + datastore = stub_manager.testbed.get_stub('datastore_v3') + self._orig_policy = datastore._consistency_policy + + datastore.SetConsistencyPolicy(datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=self.consistency_probability)) + + def _post_teardown(self): + """ Performs any post-test things. This includes: + + * Putting back the original ROOT_URLCONF if it was changed. + * Force closing the connection, so that the next test gets + a clean cursor. + """ + if hasattr(self, '_orig_policy'): + datastore = stub_manager.testbed.get_stub('datastore_v3') + datastore.SetConsistencyPolicy(self._orig_policy) + + super(GAETestCase,self)._post_teardown() + +liveServerLock = threading.Lock() + +class SyncTextTestResult(TextTestResult): + def addError(self, test, err): + if hasattr(test, "server_thread"): + test.server_thread.join() + super(SyncTextTestResult, self).addError(test, err) + +if devappserver_ver == 1: + from google.appengine.tools import dev_appserver + from google.appengine.tools import dev_appserver_main + + class LiveServerThread(threading.Thread): + """ + Thread for running a live http server while the tests are running. + + This is mostly copied from django.test.testcases.LiveServerThread + It's modified slightly to launch dev_appserver instead of a plain + HTTP server. The shutdown mechanism is slightly different too. + + One big problem is that dev_appserver mangles the environment. + It's easy to run into threading issues where the dev_appserver + thread and the main application (test) thread conflict. 
One common + example is trying to use logging.error(), which will often cause + conflicts since dev_appserver replaces stderr. We use + liveServerLock to avoid these conflicts. + + Your own test code will need to acquire liveServerLock pretty much + every time you're doing something outside of an HTTP request. + """ + + def __init__(self, host, possible_ports, connections_override=None): + self.host = host + self.port = None + self.possible_ports = possible_ports + self.is_ready = threading.Event() + self.error = None + self.connections_override = connections_override + super(LiveServerThread, self).__init__() + + def run(self): + """ + Sets up the live server and databases, and then loops over handling + http requests. + """ + + def sync_handle_request(self): + try: + readable, _, _ = select.select([self.socket], [], [], 10) + if readable: + liveServerLock.acquire() + try: + self.original_handle_request() + except Exception, e: + pass + finally: + liveServerLock.release() + except Exception, e: + pass + + if self.connections_override: + from django.db import connections + # Override this thread's database connections with the ones + # provided by the main thread. + for alias, conn in self.connections_override.items(): + connections[alias] = conn + try: + # Go through the list of possible ports, hoping that we can find + # one that is free to use for the WSGI server. + for index, port in enumerate(self.possible_ports): + try: + options = dev_appserver_main.DEFAULT_ARGS.copy() + options['disable_task_running'] = True # Prevent launch of task queue thread + dev_appserver.SetupStubs(appid, **options) + + self.httpd = dev_appserver.CreateServer(".", '/_ah/login', port, default_partition="dev") + except socket.error, e: + error_code = e.errno + if (index + 1 < len(self.possible_ports) and + error_code == errno.EADDRINUSE): + # This port is already in use, so we go on and try with + # the next one in the list. 
+ continue + else: + # Either none of the given ports are free or the error + # is something else than "Address already in use". So + # we let that error bubble up to the main thread. + raise + + else: + # A free port was found. + self.port = port + break + + # HACK: The magic happens here. We replace the http request handler + # with our sync'd version + self.httpd.original_handle_request = self.httpd.handle_request + self.httpd.handle_request = types.MethodType(sync_handle_request, self.httpd) + + self.is_ready.set() + self.httpd.serve_forever() + except Exception, e: + self.error = e + try: + self.httpd.server_close() + except Exception, e: + pass + self.is_ready.set() + + def join(self, timeout=None): + if hasattr(self, 'httpd'): + # Stop the WSGI server + try: + self.httpd.stop_serving_forever() + # We need to hit the server with one more request to make it quit + connection = httplib.HTTPConnection(self.host, self.port) + connection.request('GET',"/") + connection.close() + #self.httpd.server_close() + except Exception, e: + pass + super(LiveServerThread, self).join(timeout) + + + # This is copied directly from django.test.testcases + class LiveServerTestCase(TransactionTestCase): + """ + Does basically the same as TransactionTestCase but also launches a live + http server in a separate thread so that the tests may use another testing + framework, such as Selenium for example, instead of the built-in dummy + client. + + Note that it inherits from TransactionTestCase instead of TestCase because + the threads do not share the same transactions (unless if using in-memory + sqlite) and each thread needs to commit all their transactions so that the + other thread can see the changes. 
+
+        Be careful that almost everything you do needs to be synchronized against
+        the liveServerLock (which you can easily reference as self.lock).
+        """
+        lock = liveServerLock
+
+        @property
+        def live_server_url(self):
+            return 'http://%s:%s' % (
+                self.server_thread.host, self.server_thread.port)
+
+        def _pre_setup(self):
+            connections_override = {}
+            for conn in connections.all():
+                # If using in-memory sqlite databases, pass the connections to
+                # the server thread.
+                if (conn.settings_dict['ENGINE'] == 'django.db.backends.sqlite3'
+                    and conn.settings_dict['NAME'] == ':memory:'):
+                    # Explicitly enable thread-shareability for this connection
+                    conn.allow_thread_sharing = True
+                    connections_override[conn.alias] = conn
+
+            # Launch the live server's thread
+            specified_address = os.environ.get(
+                'DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:8081-8089')
+
+            # The specified ports may be of the form '8000-8010,8080,9200-9300'
+            # i.e. a comma-separated list of ports or ranges of ports, so we break
+            # it down into a detailed list of all possible ports.
+            possible_ports = []
+            try:
+                host, port_ranges = specified_address.split(':')
+                for port_range in port_ranges.split(','):
+                    # A port range can be of either form: '8000' or '8000-8010'.
+                    extremes = map(int, port_range.split('-'))
+                    assert len(extremes) in [1, 2]
+                    if len(extremes) == 1:
+                        # Port range of the form '8000'
+                        possible_ports.append(extremes[0])
+                    else:
+                        # Port range of the form '8000-8010'
+                        for port in range(extremes[0], extremes[1] + 1):
+                            possible_ports.append(port)
+            except Exception:
+                raise ImproperlyConfigured('Invalid address ("%s") for live '
+                                           'server.' 
% specified_address) + + self.server_thread = LiveServerThread( + host, possible_ports, connections_override) + self.server_thread.daemon = True + self.server_thread.start() + + # Wait for the live server to be ready + self.server_thread.is_ready.wait() + if self.server_thread.error: + raise self.server_thread.error + + liveServerLock.acquire() # Lock while we load fixtures + super(LiveServerTestCase, self)._pre_setup() + liveServerLock.release() + + def _post_teardown(self): + # There may not be a 'server_thread' attribute if setUpClass() for some + # reasons has raised an exception. + if hasattr(self, 'server_thread'): + # Terminate the live server's thread + self.server_thread.join() + super(LiveServerTestCase, self)._post_teardown() + +else: # devappserver2 + import sys + from django.db import DEFAULT_DB_ALIAS + from django.core.management import call_command + import dev_appserver + from google.appengine.api import apiproxy_stub_map + from google.appengine.tools.devappserver2 import devappserver2 + from google.appengine.tools.devappserver2 import python_runtime + from google.appengine.tools.devappserver2 import shutdown + + sys.path[1:1] = dev_appserver._DEVAPPSERVER2_PATHS + + class LiveServerTestCase(TransactionTestCase): + """ + Does basically the same as TransactionTestCase but also launches a live + http server in a separate thread so that the tests may use another testing + framework, such as Selenium for example, instead of the built-in dummy + client. + + Note that it inherits from TransactionTestCase instead of TestCase because + the threads do not share the same transactions (unless if using in-memory + sqlite) and each thread needs to commit all their transactions so that the + other thread can see the changes. 
+
+        Be careful that almost everything you do needs to be synchronized against
+        the liveServerLock (which you can easily reference as self.lock).
+        """
+        lock = liveServerLock
+
+        @property
+        def live_server_url(self):
+            return 'http://%s:%s' % (
+                'localhost', self.port)
+
+        def _pre_setup(self):
+            apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
+            self.server = None
+
+            # Launch the live server's thread
+            specified_address = os.environ.get(
+                'DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:8081-8089')
+
+            # The specified ports may be of the form '8000-8010,8080,9200-9300'
+            # i.e. a comma-separated list of ports or ranges of ports, so we break
+            # it down into a detailed list of all possible ports.
+            possible_ports = []
+            try:
+                host, port_ranges = specified_address.split(':')
+                for port_range in port_ranges.split(','):
+                    # A port range can be of either form: '8000' or '8000-8010'.
+                    extremes = map(int, port_range.split('-'))
+                    assert len(extremes) in [1, 2]
+                    if len(extremes) == 1:
+                        # Port range of the form '8000'
+                        possible_ports.append(extremes[0])
+                    else:
+                        # Port range of the form '8000-8010'
+                        for port in range(extremes[0], extremes[1] + 1):
+                            possible_ports.append(port)
+            except Exception:
+                raise ImproperlyConfigured('Invalid address ("%s") for live '
+                                           'server.' % specified_address)
+
+            python_runtime._RUNTIME_ARGS = [
+                sys.executable,
+                os.path.join(
+                    os.path.dirname(dev_appserver.__file__), '_python_runtime.py')]
+            options = devappserver2.PARSER.parse_args([
+                '--admin_port', '0',
+                '--port', '%s' % possible_ports[0],
+                '--datastore_path', ':memory:',
+                '--logs_path', ':memory:',
+                '--skip_sdk_update_check',
+                '--',
+                '.'])
+            self.port = possible_ports[0]
+            self.server = devappserver2.DevelopmentServer()
+            self.server.start(options)
+
+            # Wait for the live server to be ready
+            # How do we sync this? 
+ + liveServerLock.acquire() # Lock while we load fixtures + super(LiveServerTestCase, self)._pre_setup() + liveServerLock.release() + + def _fixture_setup(self): + # If the test case has a multi_db=True flag, flush all databases. + # Otherwise, just flush default. + if getattr(self, 'multi_db', False): + databases = connections + else: + databases = [DEFAULT_DB_ALIAS] + try: + for db in databases: + # call_command('flush', verbosity=0, interactive=False, database=db) + + if hasattr(self, 'fixtures'): + # We have to use this slightly awkward syntax due to the fact + # that we're using *args and **kwargs together. + call_command('loaddata', *self.fixtures, + **{'verbosity': 0, 'database': db}) + except Exception, e: + pass + + def _post_teardown(self): + if self.server: + print 'Stopping server' + self.server.stop() + print 'Server stopped' + + super(LiveServerTestCase, self)._post_teardown() + diff --git a/djangoappengine/tests/__init__.py b/djangoappengine/tests/__init__.py index 4a03015..a75e1bc 100644 --- a/djangoappengine/tests/__init__.py +++ b/djangoappengine/tests/__init__.py @@ -2,8 +2,9 @@ from .field_db_conversion import FieldDBConversionTest from .field_options import FieldOptionsTest from .filter import FilterTest -from .keys import KeysTest +from .keys import KeysTest, DbKeyFieldTest, AncestorQueryTest, ParentKeyTest from .mapreduce_input_readers import DjangoModelInputReaderTest, DjangoModelIteratorTest from .not_return_sets import NonReturnSetsTest from .order import OrderTest from .transactions import TransactionTest +from .ancestor import AncestorTest diff --git a/djangoappengine/tests/ancestor.py b/djangoappengine/tests/ancestor.py new file mode 100644 index 0000000..0f4b815 --- /dev/null +++ b/djangoappengine/tests/ancestor.py @@ -0,0 +1,47 @@ +from django.test import TestCase +from django.utils import unittest +from django.db import models + +from djangoappengine.fields import DbKeyField + +from djangoappengine.db.models.manager import Manager + 
+#from djangotoolbox.fields import ListField +#from google.appengine.api.datastore import Key + +class ParentFoo(models.Model): + key = DbKeyField(primary_key=True) + foo = models.IntegerField() + objects = Manager() + +class ChildFoo(models.Model): + key = DbKeyField(primary_key=True, parent_key_name='parent_key') + foo = models.IntegerField() + objects = Manager() + +class AncestorTest(TestCase): + def test_simple(self): + px = ParentFoo(foo=5) + px.save() + px = ParentFoo(foo=2) + px.save() + + parents = ParentFoo.objects.all() + self.assertEqual(2, parents.count()) + + parents = ParentFoo.objects.filter(foo=2) + self.assertEqual(1, parents.count()) + + child = ChildFoo(foo=10, parent_key=px.pk) + orig_child_pk = child.pk + child.save() + + results = list(ChildFoo.objects.ancestor(px.pk)) + + self.assertEquals(1, len(results)) + self.assertEquals(results[0].pk, child.pk) + + results = list(ChildFoo.objects.all().ancestor(px.pk)) + + self.assertEquals(1, len(results)) + self.assertEquals(results[0].pk, child.pk) \ No newline at end of file diff --git a/djangoappengine/tests/field_db_conversion.py b/djangoappengine/tests/field_db_conversion.py index 29b91e9..9ed98d6 100644 --- a/djangoappengine/tests/field_db_conversion.py +++ b/djangoappengine/tests/field_db_conversion.py @@ -1,13 +1,17 @@ import datetime from django.test import TestCase +from django.db.models.fields.files import FieldFile from google.appengine.api.datastore import Get from google.appengine.api.datastore_types import Text, Category, Email, \ Link, PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, \ Key, Rating, BlobKey +from google.appengine.api import files +from django.core.files.base import ContentFile from .testmodels import FieldsWithoutOptionsModel +from djangoappengine.storage import BlobstoreFile, BlobstoreStorage # TODO: Add field conversions for ForeignKeys? 
@@ -16,12 +20,18 @@ class FieldDBConversionTest(TestCase): def test_db_conversion(self): actual_datetime = datetime.datetime.now() + + bs = BlobstoreStorage() + fn = bs.save("foo.txt", ContentFile("test")) + bf = BlobstoreFile(fn, 'a', bs) + entity = FieldsWithoutOptionsModel( datetime=actual_datetime, date=actual_datetime.date(), time=actual_datetime.time(), floating_point=5.97, boolean=True, null_boolean=False, text='Hallo', email='hallo@hallo.com', comma_seperated_integer='5,4,3,2', ip_address='194.167.1.1', slug='you slugy slut :)', + file=bf, file_path=bf.name, url='http://www.scholardocs.com', long_text=1000 * 'A', indexed_text='hello', integer=-400, small_integer=-4, positive_integer=400, @@ -38,6 +48,7 @@ def test_db_conversion(self): for name, types in [('long_text', Text), ('indexed_text', unicode), ('text', unicode), ('ip_address', unicode), ('slug', unicode), + ('file', unicode), ('file_path', unicode), ('email', unicode), ('comma_seperated_integer', unicode), ('url', unicode), ('time', datetime.datetime), ('datetime', datetime.datetime), ('date', datetime.datetime), @@ -49,7 +60,7 @@ def test_db_conversion(self): column = opts.get_field_by_name(name)[0].column if not isinstance(types, (list, tuple)): types = (types, ) - self.assertTrue(type(gae_entity[column]) in types) + self.assertTrue(type(gae_entity[column]) in types, column) # Get the model instance and check if the fields convert back # to the right types. 
@@ -59,6 +70,7 @@ def test_db_conversion(self): ('indexed_text', unicode), ('text', unicode), ('ip_address', unicode), ('slug', unicode), + ('file', FieldFile), ('file_path', unicode), ('email', unicode), ('comma_seperated_integer', unicode), ('url', unicode), ('datetime', datetime.datetime), ('date', datetime.date), ('time', datetime.time), @@ -69,4 +81,4 @@ def test_db_conversion(self): ('positive_small_integer', (int, long))]: if not isinstance(types, (list, tuple)): types = (types, ) - self.assertTrue(type(getattr(model, name)) in types) + self.assertTrue(type(getattr(model, name)) in types, name) diff --git a/djangoappengine/tests/keys.py b/djangoappengine/tests/keys.py index 938971f..4603435 100644 --- a/djangoappengine/tests/keys.py +++ b/djangoappengine/tests/keys.py @@ -6,8 +6,13 @@ from django.test import TestCase from django.utils import unittest +from djangoappengine.fields import DbKeyField +from djangoappengine.db.utils import as_ancestor from djangotoolbox.fields import ListField +from google.appengine.api.datastore import Key + + class AutoKey(models.Model): pass @@ -287,3 +292,164 @@ def test_key_kind(self): parents_column = child._meta.get_field('parents').column self.assertEqual(child_entity[parent_column], parent_key) self.assertEqual(child_entity[parents_column][0], parent_key) + + +class ParentModel(models.Model): + key = DbKeyField(primary_key=True) + +class NonDbKeyParentModel(models.Model): + id = models.AutoField(primary_key=True) + +class ChildModel(models.Model): + key = DbKeyField(primary_key=True, parent_key_name='parent_key') + +class AnotherChildModel(models.Model): + key = DbKeyField(primary_key=True, parent_key_name='parent_key') + +class ForeignKeyModel(models.Model): + id = models.AutoField(primary_key=True) + relation = models.ForeignKey(ParentModel) + +class DbKeyFieldTest(TestCase): + def testDbKeySave(self): + model = ParentModel() + model.save() + + self.assertIsNotNone(model.pk) + + def testForeignKeyWithGAEKey(self): + 
parent = ParentModel() + parent.save() + + fkm = ForeignKeyModel() + fkm.relation = parent + fkm.save() + + results = list(ForeignKeyModel.objects.filter(relation=parent)) + self.assertEquals(1, len(results)) + self.assertEquals(results[0].pk, fkm.pk) + + def testPrimaryKeyQuery(self): + parent = ParentModel() + parent.save() + + db_parent = ParentModel.objects.get(pk=parent.pk) + + self.assertEquals(parent.pk, db_parent.pk) + +class ParentKeyTest(TestCase): + def testParentChildSave(self): + parent = ParentModel() + orig_parent_pk = parent.pk + parent.save() + + child = ChildModel(parent_key=parent.pk) + orig_child_pk = child.pk + child.save() + + self.assertNotEquals(parent.pk, orig_parent_pk) + self.assertNotEquals(child.pk, orig_child_pk) + self.assertEquals(child.pk.parent(), parent.pk) + + def testParentModelChildSave(self): + parent = ParentModel() + orig_parent_pk = parent.pk + parent.save() + + with self.assertRaises(ValueError): + child = ChildModel(parent_key=parent) + + def testNonDbKeyParent(self): + parent = NonDbKeyParentModel() + parent.save() + + with self.assertRaises(ValueError): + child = ChildModel(parent_key=parent.pk) + +class AncestorQueryTest(TestCase): + def testAncestorFilterQuery(self): + parent = ParentModel() + parent.save() + + child = ChildModel(parent_key=parent.pk) + child.save() + + results = list(ChildModel.objects.filter(pk=as_ancestor(parent.pk))) + + self.assertEquals(1, len(results)) + self.assertEquals(results[0].pk, child.pk) + + def testAncestorGetQuery(self): + parent = ParentModel() + parent.save() + + child = ChildModel(parent_key=parent.pk) + child.save() + + result = ChildModel.objects.get(pk=as_ancestor(parent.pk)) + + self.assertEquals(result.pk, child.pk) + + def testEmptyAncestorQuery(self): + parent = ParentModel() + parent.save() + + results = list(ChildModel.objects.filter(pk=as_ancestor(parent.pk))) + + self.assertEquals(0, len(results)) + + def testEmptyAncestorQueryWithUnsavedChild(self): + parent = 
ParentModel() + parent.save() + + child = ChildModel(parent_key=parent.pk) + + results = list(ChildModel.objects.filter(pk=as_ancestor(parent.pk))) + + self.assertEquals(0, len(results)) + + def testUnsavedAncestorQuery(self): + parent = ParentModel() + + with self.assertRaises(ValueError): + results = list(ChildModel.objects.filter(pk=as_ancestor(parent.pk))) + + def testDifferentChildrenAncestorQuery(self): + parent = ParentModel() + parent.save() + + child1 = ChildModel(parent_key=parent.pk) + child1.save() + child2 = AnotherChildModel(parent_key=parent.pk) + child2.save() + + results1 = list(ChildModel.objects.filter(pk=as_ancestor(parent.pk))) + + self.assertEquals(1, len(results1)) + self.assertEquals(results1[0].pk, child1.pk) + + results2 = list(AnotherChildModel.objects.filter(pk=as_ancestor(parent.pk))) + self.assertEquals(1, len(results2)) + self.assertEquals(results2[0].pk, child2.pk) + + def testDifferentParentsAncestorQuery(self): + parent1 = ParentModel() + parent1.save() + + child1 = ChildModel(parent_key=parent1.pk) + child1.save() + + parent2 = ParentModel() + parent2.save() + + child2 = ChildModel(parent_key=parent2.pk) + child2.save() + + results1 = list(ChildModel.objects.filter(pk=as_ancestor(parent1.pk))) + + self.assertEquals(1, len(results1)) + self.assertEquals(results1[0].pk, child1.pk) + + results2 = list(ChildModel.objects.filter(pk=as_ancestor(parent2.pk))) + self.assertEquals(1, len(results2)) + self.assertEquals(results2[0].pk, child2.pk) diff --git a/djangoappengine/tests/testmodels.py b/djangoappengine/tests/testmodels.py index 0b4bdc4..5e5b0fa 100644 --- a/djangoappengine/tests/testmodels.py +++ b/djangoappengine/tests/testmodels.py @@ -29,8 +29,8 @@ class FieldsWithoutOptionsModel(models.Model): ip_address = models.IPAddressField() slug = models.SlugField() url = models.URLField() -# file = models.FileField() -# file_path = models.FilePathField() + file = models.FileField() + file_path = models.FilePathField() long_text = 
models.TextField() indexed_text = models.TextField() integer = models.IntegerField() @@ -67,8 +67,8 @@ class FieldsWithOptionsModel(models.Model): ip_address = models.IPAddressField(default='192.168.0.2') slug = models.SlugField(default='GAGAA', null=True) url = models.URLField(default='http://www.scholardocs.com') -# file = FileField() -# file_path = FilePathField() + # file = models.FileField() + # file_path = models.FilePathField() long_text = models.TextField(default=1000 * 'A') integer = models.IntegerField(default=100) small_integer = models.SmallIntegerField(default=-5) diff --git a/djangoappengine/utils.py b/djangoappengine/utils.py index f80b2ab..9cd0290 100644 --- a/djangoappengine/utils.py +++ b/djangoappengine/utils.py @@ -15,7 +15,7 @@ appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {}, default_partition='dev')[0] appid = appconfig.application.split('~', 1)[-1] - except ImportError, e: + except Exception, e: raise Exception("Could not get appid. Is your app.yaml file missing? " "Error was: %s" % e)