Skip to content

Commit

Permalink
Merge branch 'release/0.3.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
erichiggins committed Oct 22, 2016
2 parents 074a01e + 572906e commit 4679a2c
Show file tree
Hide file tree
Showing 9 changed files with 307 additions and 47 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ docs/_build/

# Autoenv
.env
.dev_env

# GAE
google
Expand Down
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,15 @@
0.3.0
=====
- Added support for encoding `ndb.Key` objects as entities, pairs, or urlsafe-strings.
- Fixed a compatibility issue between `ndb_json` and `freezegun`.
- Fixed an issue where `'null'` could not be decoded properly.
- Fixed an issue where nested date strings were not decoded into `datetime` objects.

0.2.3
=====
- Fixed a bug in `environ.is_default_version()`
- Cleaned up `requirements.txt`

0.2.2
=====
- Build and file clean-ups.
Expand Down
63 changes: 63 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,69 @@ Usage:
# Parse a JSON string into a Python dictionary.
ndb_json.loads(json_str)

When the encoder encounters a property of the `ndb.Key` type,
there are three encoding options available:

* `ndb_keys_as_entities` - encode the Key property as a `Future` whose eventual result is the entity for the key.
This is the default option.
* `ndb_keys_as_pairs` - encode the Key property as a tuple of (kind, id) pairs.
* `ndb_keys_as_urlsafe` - encode the Key property as a websafe-base64-encoded serialized version of the key.

Please refer to [NDB Key Class](https://cloud.google.com/appengine/docs/python/ndb/keyclass) documentation for details.

For example, for the following data models:

```
class Master(ndb.Model):
name = ndb.StringProperty()
```
```
class Details(ndb.Model):
master = ndb.KeyProperty()
description = ndb.StringProperty()
```

and following records:

```
master = Master(id=123456L, name='Europe')
details = Details(
master=ndb.Key(Master, 123456L),
description='List of European customers'
)
```

The calls
```
json_str = ndb_json.dumps(details)
json_str = ndb_json.dumps(details, ndb_keys_as_entities=True)
```
will return

```
{"master": {"name": "Europe"}, "description": "List of European customers"}
```

The call
```
json_str = ndb_json.dumps(details, ndb_keys_as_pairs=True)
```
will return

```
{"master": [["Master", 123456]], "description": "List of European customers"}
```

The call
```
json_str = ndb_json.dumps(details, ndb_keys_as_urlsafe=True)
```
will return

```
{"master": "agFfcg4LEgZNYXN0ZXIYwMQHDA", "description": "List of European customers"}
```


Feature parity with the Python `json` module functions.

Expand Down
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.2.3
0.3.0
2 changes: 1 addition & 1 deletion gaek/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

__author__ = 'Eric Higgins'
__email__ = '[email protected]'
__version__ = '0.2.3'
__version__ = '0.3.0'
2 changes: 1 addition & 1 deletion gaek/environ.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
"""

__author__ = 'Eric Higgins'
__copyright__ = 'Copyright 2015, Eric Higgins'
__copyright__ = 'Copyright 2015-2016, Eric Higgins'
__email__ = '[email protected]'


Expand Down
144 changes: 101 additions & 43 deletions gaek/ndb_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,18 @@
Usage:
import ndb_json
# Serialize an ndb.Query into an array of JSON objects.
query = models.MyModel.query()
query_json = ndb_json.dumps(query)
# Convert into a list of Python dictionaries.
query_dicts = ndb_json.loads(query_json)
# Serialize an ndb.Model instance into a JSON object.
entity = query.get()
entity_json = ndb_json.dumps(entity)
# Convert into a Python dictionary.
entity_dict = ndb_json.loads(entity_json)
Expand All @@ -25,16 +25,15 @@
- dateutil: https://pypi.python.org/pypi/python-dateutil
"""

__author__ = 'Eric Higgins'
__copyright__ = 'Copyright 2013, Eric Higgins'
__copyright__ = 'Copyright 2013-2016, Eric Higgins'
__email__ = '[email protected]'


import base64
import datetime
import json
import re
import time
import types

Expand All @@ -46,6 +45,8 @@
'dump',
'dumps',
'loads',
'NdbDecoder',
'NdbEncoder',
)


Expand All @@ -67,14 +68,25 @@ def encode_generator(obj):
return list(obj)


def encode_key(obj):
def encode_key_as_entity(obj):
  """Encode an ndb.Key by dereferencing it into the entity it points to.

  Returns the async Future from ``Key.get_async()``; the Future is
  presumably resolved later in the encoding pass (see README: the
  eventual result is the entity for the key) — confirm against
  ``encode_future``.

  NOTE(erichiggins): Potentially poor performance for Models w/ many
  KeyProperty properties.
  NOTE(ronufryk): Potentially can cause circular references and
  "RuntimeError: maximum recursion depth exceeded".
  """
  pending_entity = obj.get_async()
  return pending_entity
# Alternative 1: Convert into pairs.
# return obj.pairs()
# Alternative 2: Convert into URL-safe base64-encoded string.
# return obj.urlsafe()


# Alias for backward-compatibility: this commit renames ``encode_key`` to
# ``encode_key_as_entity`` (see CHANGELOG 0.3.0); keep the old name bound to
# the same function so existing imports keep working.
encode_key = encode_key_as_entity


def encode_key_as_pair(obj):
  """Encode an ndb.Key as its tuple of (kind, id) pairs."""
  # Delegate straight to ndb.Key.pairs().
  kind_id_pairs = obj.pairs()
  return kind_id_pairs


def encode_key_as_urlsafe(obj):
  """Encode an ndb.Key as its websafe-base64 serialized string."""
  # Delegate straight to ndb.Key.urlsafe().
  websafe_key = obj.urlsafe()
  return websafe_key


def encode_future(obj):
Expand All @@ -98,7 +110,7 @@ def encode_complex(obj):

def encode_basevalue(obj):
"""Retrieve the actual value from a ndb.model._BaseValue.
This is a convenience function to assist with the following issue:
https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=208
"""
Expand All @@ -109,28 +121,97 @@ def encode_basevalue(obj):
ndb.MetaModel: encode_model,
ndb.Query: encode_generator,
ndb.QueryIterator: encode_generator,
ndb.Key: encode_key,
ndb.Key: encode_key_as_entity,
ndb.Future: encode_future,
datetime.date: encode_datetime,
datetime.datetime: encode_datetime,
time.struct_time: encode_generator,
types.ComplexType: encode_complex,
ndb.model._BaseValue: encode_basevalue,

}

# Sort the types so any iteration is in a deterministic order
NDB_TYPES = sorted(NDB_TYPE_ENCODING.keys(), key=lambda t: t.__name__)


class NdbDecoder(json.JSONDecoder):
  """Extend the JSON decoder to add support for datetime objects.

  String values that look like dates (contain exactly two '-' characters
  and are longer than 9 characters) are converted into naive ``datetime``
  objects via ``dateutil.parser``.

  NOTE(review): Python 2 idioms (``iteritems``, ``basestring``) — confirm
  the supported runtime before porting.
  """

  def __init__(self, **kwargs):
    """Override the default __init__ in order to specify our own parameters."""
    # Route every decoded JSON object through object_hook_handler so that
    # date strings nested inside objects are converted too.
    json.JSONDecoder.__init__(self, object_hook=self.object_hook_handler, **kwargs)

  def object_hook_handler(self, val):
    """Handles decoding of nested date strings."""
    # Invoked once per decoded JSON object; only the values are inspected.
    return {k: self.decode_date(v) for k, v in val.iteritems()}

  def decode_date(self, val):
    """Tries to decode strings that look like dates into datetime objects."""
    # Heuristic filter: two '-' separators and length > 9 skips integers
    # and most non-date strings, but is not a strict ISO-8601 check.
    if isinstance(val, basestring) and val.count('-') == 2 and len(val) > 9:
      try:
        dt = dateutil.parser.parse(val)
        # Check for UTC.
        if val.endswith(('+00:00', '-00:00', 'Z')):
          # Then remove tzinfo for gae, which is offset-naive.
          dt = dt.replace(tzinfo=None)
        return dt
      except (TypeError, ValueError):
        # Not a parseable date after all; fall through to return val as-is.
        pass
    return val

  def decode(self, val):
    """Override of the default decode method that also uses decode_date."""
    # First try the date decoder.
    new_val = self.decode_date(val)
    if val != new_val:
      # The whole payload was a bare date-like string; no JSON parsing needed.
      return new_val
    # Fall back to the default decoder.
    return json.JSONDecoder.decode(self, val)


class NdbEncoder(json.JSONEncoder):
"""Extend the JSON encoder to add support for NDB Models."""


def __init__(self, **kwargs):
  """Initialize the encoder, extracting the ndb.Key encoding options.

  Keyword Args:
    ndb_keys_as_entities: bool, encode Key properties as their entities
        (also the default when no flag is passed).
    ndb_keys_as_pairs: bool, encode Key properties as (kind, id) pairs.
    ndb_keys_as_urlsafe: bool, encode Key properties as urlsafe strings.
    All remaining kwargs are forwarded to ``json.JSONEncoder``.

  Raises:
    ValueError: if more than one of the three key-encoding flags is True.
  """
  # Per-instance copy so the Key encoder chosen below does not leak into
  # the module-level mapping shared by other encoder instances.
  self._ndb_type_encoding = NDB_TYPE_ENCODING.copy()

  # Pop the custom flags before delegating to json.JSONEncoder, which
  # does not accept unknown keyword arguments.
  keys_as_entities = kwargs.pop('ndb_keys_as_entities', False)
  keys_as_pairs = kwargs.pop('ndb_keys_as_pairs', False)
  keys_as_urlsafe = kwargs.pop('ndb_keys_as_urlsafe', False)

  # Validate that only one of three flags is True
  if ((keys_as_entities and keys_as_pairs)
      or (keys_as_entities and keys_as_urlsafe)
      or (keys_as_pairs and keys_as_urlsafe)):
    raise ValueError('Only one of arguments ndb_keys_as_entities, ndb_keys_as_pairs, ndb_keys_as_urlsafe can be True')

  if keys_as_pairs:
    self._ndb_type_encoding[ndb.Key] = encode_key_as_pair
  elif keys_as_urlsafe:
    self._ndb_type_encoding[ndb.Key] = encode_key_as_urlsafe
  else:
    # Entities are the default, whether or not the flag was explicitly set.
    self._ndb_type_encoding[ndb.Key] = encode_key_as_entity

  json.JSONEncoder.__init__(self, **kwargs)

def default(self, obj):
"""Overriding the default JSONEncoder.default for NDB support."""

obj_type = type(obj)
# NDB Models return a repr to calls from type().
if obj_type not in NDB_TYPE_ENCODING and hasattr(obj, '__metaclass__'):
obj_type = obj.__metaclass__
fn = NDB_TYPE_ENCODING.get(obj_type)
if obj_type not in self._ndb_type_encoding:
if hasattr(obj, '__metaclass__'):
obj_type = obj.__metaclass__
else:
# Try to encode subclasses of types
for ndb_type in NDB_TYPES:
if isinstance(obj, ndb_type):
obj_type = ndb_type
break

fn = self._ndb_type_encoding.get(obj_type)

if fn:
return fn(obj)

Expand All @@ -150,27 +231,4 @@ def dump(ndb_model, fp, **kwargs):

def loads(json_str, **kwargs):
  """Custom json loads function that converts datetime strings."""
  # Decode with the standard json module first, then post-process so
  # date-like strings become datetime objects.
  json_dict = json.loads(json_str, **kwargs)
  if isinstance(json_dict, list):
    # Top-level JSON array: convert each element.
    # NOTE(review): assumes every element is a dict — iteritems would
    # fail on scalar elements; confirm with callers.
    return map(iteritems, json_dict)
  return iteritems(json_dict)


def iteritems(json_dict):
  """Loop over a json dict and try to convert strings to datetime.

  Mutates ``json_dict`` in place (existing keys are reassigned, which is
  safe during iteration) and also returns it; nested dicts are handled
  recursively.
  """
  for key, val in json_dict.iteritems():
    if isinstance(val, dict):
      # Recurse; the nested dict is mutated in place, so the returned
      # value does not need to be captured.
      iteritems(val)
    # TODO(erichiggins): Find a better way to detect date/time-like strings.
    # It's a little hacky to check for specific chars, but avoids integers.
    elif isinstance(val, basestring) and val.count('-') == 2 and len(val) > 9:
      try:
        json_dict[key] = dateutil.parser.parse(val)
        # Check for UTC.
        if val.endswith(('+00:00', '-00:00', 'Z')):
          # Then remove tzinfo for gae, which is offset-naive.
          json_dict[key] = json_dict[key].replace(tzinfo=None)
      except (TypeError, ValueError):
        # Not a date after all; leave the original string untouched.
        pass
  return json_dict

return NdbDecoder(**kwargs).decode(json_str)
1 change: 1 addition & 0 deletions requirements_test.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
nose~=1.3.7
PyYAML~=3.11
mock~=2.0.0
Loading

0 comments on commit 4679a2c

Please sign in to comment.