From f0f3441bcb9907562a51e6ebb70a5aef7c513cc8 Mon Sep 17 00:00:00 2001 From: Dominik Kellner Date: Mon, 26 Sep 2016 15:01:44 +0200 Subject: [PATCH] Support for Cerberus 1.0 (work in progress) "Work in progress" because we still have to wait for the Cerberus issue "`readonly` conflicts with `default`" to be resolved. (see https://github.com/nicolaiarocci/cerberus/issues/268) This is a rather big change. I still decided to do a single commit, as intermediate commits would be in a non-working state anyway. Breaking changes: - `keyschema` was renamed to `valueschema` and `propertyschema` to `keyschema` (following changes in Cerberus). - A PATCH on a document which misses a field having a default value will now result in setting this value, even if the field was not provided in the PATCH's payload. - Error messages for `keyschema` are now returned as a dictionary. Before: {'propertyschema_dict': 'propertyschema_dict'} Now: {'keyschema_dict': {'AAA': "value does not match regex '[a-z]+'"}} - Error messages for `type` validations are different now (following changes in Cerberus). - It is no longer valid to have a field with `default` = None and `nullable` = False. 
(see patch.py:test_patch_nested_document_nullable_missing) In a nutshell, changes to the codebase are as follows: - Add data layer independent subclass of `cerberus.Validator` * Support new signature of `__init__` and `validate` * Use `_config`-aware properties instead of bare member attributes to pass the `resource`, `document_id` and `persisted_document` to make them available to child validators * Add schema-docstrings to all `_validate_*` methods - Adjust Mongo-specific `Validator` subclass * Adjust `_validate_type_*` methods (following changes in Cerberus) * Add schema-docstrings to all `_validate_*` methods - Add custom ErrorHandler to support `VALIDATION_ERROR_AS_LIST` - A few renames: * `ValidationError` -> `DocumentError` * `propertyschema` -> `keyschema` and `keyschema` -> `valueschema` - Adjust tests due to different validation error messages (mostly for `type`) - Remove `transparent_schema_rules` without replacement - Remove `default`-handling, as Cerberus takes care of this now --- eve/default_settings.py | 3 - eve/defaults.py | 119 ------------- eve/flaskapp.py | 15 -- eve/io/mongo/validation.py | 322 +++++------------------------------- eve/methods/patch.py | 6 +- eve/methods/post.py | 9 +- eve/methods/put.py | 8 +- eve/tests/config.py | 20 --- eve/tests/default_values.py | 243 --------------------------- eve/tests/endpoints.py | 6 +- eve/tests/io/media.py | 4 +- eve/tests/io/mongo.py | 32 +--- eve/tests/methods/patch.py | 20 ++- eve/tests/methods/post.py | 14 +- eve/tests/test_settings.py | 4 +- eve/tests/versioning.py | 3 +- eve/validation.py | 143 +++++++++++++++- requirements.txt | 2 +- setup.py | 3 +- tox.ini | 1 + 20 files changed, 237 insertions(+), 740 deletions(-) delete mode 100644 eve/defaults.py delete mode 100644 eve/tests/default_values.py diff --git a/eve/default_settings.py b/eve/default_settings.py index 1ac76ef77..9e5cff5e0 100644 --- a/eve/default_settings.py +++ b/eve/default_settings.py @@ -228,9 +228,6 @@ # don't allow unknown 
key/value pairs for POST/PATCH payloads. ALLOW_UNKNOWN = False -# don't ignore unknown schema rules (raise SchemaError) -TRANSPARENT_SCHEMA_RULES = False - # Rate limits are disabled by default. Needs a running redis-server. RATE_LIMIT_GET = None RATE_LIMIT_POST = None diff --git a/eve/defaults.py b/eve/defaults.py deleted file mode 100644 index 1d59bec3a..000000000 --- a/eve/defaults.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- - -""" - Default values in schemas - ~~~~~~~~~~~~~~~~~~~~~~~~~ - - Default values for schemas work in two steps. - 1. The schema is searched for defaults and a list of default is built. - 2. In each POST/PUT request, for each default (if any) the document is - checked for a missing value, and if a value is missing the default is - added. - - :copyright: (c) 2016 by Nicola Iarocci. - :license: BSD, see LICENSE for more details. -""" - - -def build_defaults(schema): - """Build a tree of default values - - It walks the tree down looking for entries with a `default` key. In order - to avoid empty dicts the tree will be walked up and the empty dicts will be - removed. - - :param schema: Resource schema - :type schema: dict - :rtype: dict with defaults - - .. 
versionadded:: 0.4 - """ - # Pending schema nodes to process: loop and add defaults - pending = set() - # Stack of nodes to work on and clean up - stack = [(schema, None, None, {})] - level_schema, level_name, level_parent, current = stack[-1] - while len(stack) > 0: - leave = True - if isinstance(current, list): - level_schema = {'schema': level_schema.copy()} - for name, value in level_schema.items(): - default_next_level = None - if 'default' in value: - try: - current[name] = value['default'] - except TypeError: - current.append(value['default']) - elif value.get('type') == 'dict' and 'schema' in value: - default_next_level = {} - elif value.get('type') == 'list' and 'schema' in value: - default_next_level = [] - - if default_next_level is not None: - leave = False - next_level = add_next_level(name, current, default_next_level) - stack.append((value['schema'], name, current, next_level)) - pending.add(id(next_level)) - pending.discard(id(current)) - if leave: - # Leaves trigger the `walk up` till the next not processed node - while id(current) not in pending: - if not current and level_parent is not None: - try: - del level_parent[level_name] - except TypeError: - level_parent.remove(current) - stack.pop() - if len(stack) == 0: - break - level_schema, level_name, level_parent, current = stack[-1] - else: - level_schema, level_name, level_parent, current = stack[-1] - - return current - - -def add_next_level(name, current, default): - if isinstance(current, list): - current.append(default) - else: - default = current.setdefault(name, default) - return default - - -def resolve_default_values(document, defaults): - """ Add any defined default value for missing document fields. - - :param document: the document being posted or replaced - :param defaults: tree with the default values - :type defaults: dict - - .. versionchanged:: 0.5 - Fix #417. A default value of [] for a list causes an IndexError. - - .. 
versionadded:: 0.2 - """ - todo = [(defaults, document)] - while len(todo) > 0: - defaults, document = todo.pop() - if isinstance(defaults, list) and len(defaults): - todo.extend((defaults[0], item) for item in document) - continue - for name, value in defaults.items(): - if isinstance(value, dict): - # default dicts overwrite simple values - existing = document.setdefault(name, {}) - if not isinstance(existing, dict): - document[name] = {} - todo.append((value, document[name])) - if isinstance(value, list) and len(value): - existing = document.get(name) - if not existing: - document.setdefault(name, value) - continue - if all(isinstance(item, (dict, list)) for item in existing): - todo.extend((value[0], item) for item in existing) - else: - document.setdefault(name, existing) - else: - document.setdefault(name, value) diff --git a/eve/flaskapp.py b/eve/flaskapp.py index 8931af5f2..6cedd86ce 100644 --- a/eve/flaskapp.py +++ b/eve/flaskapp.py @@ -19,7 +19,6 @@ from werkzeug.serving import WSGIRequestHandler import eve -from eve.defaults import build_defaults from eve.endpoints import collections_endpoint, item_endpoint, home_endpoint, \ error_endpoint, media_endpoint, schema_collection_endpoint, \ schema_item_endpoint @@ -586,8 +585,6 @@ def _set_resource_defaults(self, resource, settings): settings.setdefault('auth_field', self.config['AUTH_FIELD']) settings.setdefault('allow_unknown', self.config['ALLOW_UNKNOWN']) - settings.setdefault('transparent_schema_rules', - self.config['TRANSPARENT_SCHEMA_RULES']) settings.setdefault('extra_response_fields', self.config['EXTRA_RESPONSE_FIELDS']) settings.setdefault('mongo_write_concern', @@ -600,12 +597,6 @@ def _set_resource_defaults(self, resource, settings): schema = settings.setdefault('schema', {}) self.set_schema_defaults(schema, settings['id_field']) - # 'defaults' helper set contains the names of fields with default - # values in their schema definition. - - # TODO support default values for embedded documents. 
- settings['defaults'] = build_defaults(schema) - # list of all media fields for the resource settings['_media'] = [field for field, definition in schema.items() if definition.get('type') == 'media'] @@ -686,12 +677,6 @@ def _set_resource_projection(self, ds, schema, settings): ds['projection'] is not None: ds['projection'][self.config['DELETED']] = 1 - # 'defaults' helper set contains the names of fields with default - # values in their schema definition. - - # TODO support default values for embedded documents. - settings['defaults'] = build_defaults(schema) - # list of all media fields for the resource settings['_media'] = [field for field, definition in schema.items() if definition.get('type') == 'media'] diff --git a/eve/io/mongo/validation.py b/eve/io/mongo/validation.py index a51f756e4..68dcd459b 100644 --- a/eve/io/mongo/validation.py +++ b/eve/io/mongo/validation.py @@ -11,18 +11,16 @@ :copyright: (c) 2016 by Nicola Iarocci. :license: BSD, see LICENSE for more details. """ -import copy from bson import ObjectId from bson.dbref import DBRef -from collections import Mapping from flask import current_app as app from werkzeug.datastructures import FileStorage -from cerberus import Validator from eve.auth import auth_field_and_value from eve.io.mongo.geo import Point, MultiPoint, LineString, Polygon, \ MultiLineString, MultiPolygon, GeometryCollection -from eve.utils import config, str_type +from eve.utils import config +from eve.validation import Validator from eve.versioning import get_data_version_relation_document @@ -52,69 +50,13 @@ class Validator(Validator): Support for 'transparent_schema_rules' introduced with Cerberus 0.0.3, which allows for insertion of 'default' values in POST requests. 
""" - def __init__(self, schema=None, resource=None, allow_unknown=False, - transparent_schema_rules=False): - self.resource = resource - self._id = None - self._original_document = None - - if resource: - transparent_schema_rules = \ - config.DOMAIN[resource]['transparent_schema_rules'] - allow_unknown = config.DOMAIN[resource]['allow_unknown'] - super(Validator, self).__init__( - schema, - transparent_schema_rules=transparent_schema_rules, - allow_unknown=allow_unknown) - - def validate_update(self, document, _id, original_document=None): - """ Validate method to be invoked when performing an update, not an - insert. - - :param document: the document to be validated. - :param _id: the unique id of the document. - """ - self._id = _id - self._original_document = original_document - return super(Validator, self).validate_update(document) - - def validate_replace(self, document, _id, original_document=None): - """ Validation method to be invoked when performing a document - replacement. This differs from :func:`validation_update` since in this - case we want to perform a full :func:`validate` (the new document is to - be considered a new insertion and required fields needs validation). - However, like with validate_update, we also want the current _id - not to be checked when validationg 'unique' values. - - .. versionadded:: 0.1.0 - """ - self._id = _id - self._original_document = original_document - return super(Validator, self).validate(document) - - def _validate_default(self, unique, field, value): - """ Fake validate function to let cerberus accept "default" - as keyword in the schema - - .. versionadded:: 0.6.2 - """ - pass def _validate_versioned(self, unique, field, value): - """ Fake validate function to let cerberus accept "versioned" - as keyword in the schema - - .. versionadded:: 0.6.2 - """ + """ {'type': 'boolean'} """ pass def _validate_unique_to_user(self, unique, field, value): - """ Validates that a value is unique to the active user. 
Active user is - the user authenticated for current request. See #646. - - .. versionadded: 0.6 - """ - + """ {'type': 'boolean'} """ auth_field, auth_value = auth_field_and_value(self.resource) # if an auth value has been set for this request, then make sure it is @@ -124,24 +66,7 @@ def _validate_unique_to_user(self, unique, field, value): self._is_value_unique(unique, field, value, query) def _validate_unique(self, unique, field, value): - """ Enables validation for `unique` schema attribute. - - :param unique: Boolean, wether the field value should be - unique or not. - :param field: field name. - :param value: field value. - - .. versionchanged:: 0.6 - Validates field value uniqueness against the whole datasource, - indipendently of the request method. See #646. - - .. versionchanged:: 0.3 - Support for new 'self._error' signature introduced with Cerberus - v0.5. - - .. versionchanged:: 0.2 - Handle the case in which ID_FIELD is not of ObjectId type. - """ + """ {'type': 'boolean'} """ self._is_value_unique(unique, field, value, {}) def _is_value_unique(self, unique, field, value, query): @@ -173,9 +98,9 @@ def _is_value_unique(self, unique, field, value, query): query[config.DELETED] = {'$ne': True} # exclude current document - if self._id: + if self.document_id: id_field = resource_config['id_field'] - query[id_field] = {'$ne': self._id} + query[id_field] = {'$ne': self.document_id} # we perform the check on the native mongo driver (and not on # app.data.find_one()) because in this case we don't want the usual @@ -187,30 +112,13 @@ def _is_value_unique(self, unique, field, value, query): self._error(field, "value '%s' is not unique" % value) def _validate_data_relation(self, data_relation, field, value): - """ Enables validation for `data_relation` field attribute. Makes sure - 'value' of 'field' adheres to the referential integrity rule specified - by 'data_relation'. 
- - :param data_relation: a dict following keys: - 'resource': foreign resource name - 'field': foreign field name - 'version': True if this relation points to a specific version - 'type': the type for the reference field if 'version': True - :param field: field name. - :param value: field value. - - .. versionchanged:: 0.4 - Support for document versioning. - - .. versionchanged:: 0.3 - Support for new 'self._error' signature introduced with Cerberus - v0.5. - - .. versionchanged:: 0.1.1 - 'collection' key renamed to 'resource' (data_relation) - - .. versionadded: 0.0.5 - """ + """ {'type': 'dict', + 'schema': { + 'resource': {'type': 'string', 'required': True}, + 'field': {'type': 'string', 'required': True}, + 'embeddable': {'type': 'boolean', 'default': False}, + 'version': {'type': 'boolean', 'default': False} + }} """ if 'version' in data_relation and data_relation['version'] is True: value_field = data_relation['field'] version_field = app.config['VERSION'] @@ -255,205 +163,63 @@ def _validate_data_relation(self, data_relation, field, value): (item.id if isinstance(item, DBRef) else item, data_resource, data_relation['field'])) - def _validate_type_objectid(self, field, value): - """ Enables validation for `objectid` data type. - - :param field: field name. - :param value: field value. - - .. versionchanged:: 0.3 - Support for new 'self._error' signature introduced with Cerberus - v0.5. - - .. versionchanged:: 0.1.1 - regex check replaced with proper type check. - """ - if not isinstance(value, ObjectId): - self._error(field, "value '%s' cannot be converted to a ObjectId" - % value) - - def _validate_type_dbref(self, field, value): - """ Enables validation for `DBRef` data type. - - :param field: field name. - :param value: field value. - - """ - if not isinstance(value, DBRef): - self._error(field, "value '%s' cannot be converted to a DBRef" - % value) - - def _validate_readonly(self, read_only, field, value): - """ - .. 
versionchanged:: 0.5 - Not taking defaul values in consideration anymore since they are now - being resolved after validation (#353). - Consider the original value if available (#479). - - .. versionadded:: 0.4 - """ - original_value = self._original_document.get(field) \ - if self._original_document else None - if value != original_value: - super(Validator, self)._validate_readonly(read_only, field, value) - - def _validate_dependencies(self, document, dependencies, field, - break_on_error=False): - """ With PATCH method, the validator is only provided with the updated - fields. If an updated field depends on another field in order to be - edited and the other field was previously set, the validator doesn't - see it and rejects the update. In order to avoid that we merge the - proposed changes with the original document before validating - dependencies. - - .. versionchanged:: 0.6.1 - Fix: dependencies on sub-document fields are now properly - processed (#706). - - .. versionchanged:: 0.6 - Fix: Only evaluate dependencies that don't have valid default - values. - - .. versionchanged:: 0.5.1 - Fix: dependencies with value checking seems broken #547. - - .. versionadded:: 0.5 - If a dependency has a default value, skip it as Cerberus does not - have the notion of default values and would report a missing - dependency (#353). - Fix for #363 (see docstring). 
- """ - if dependencies is None: + def _validate_type_objectid(self, value): + if isinstance(value, ObjectId): return True - if isinstance(dependencies, str_type): - dependencies = [dependencies] - - defaults = {} - for d in dependencies: - root = d.split('.')[0] - default = self.schema[root].get('default') - if default and root not in document: - defaults[root] = default - - if isinstance(dependencies, Mapping): - # Only evaluate dependencies that don't have *valid* defaults - for k, v in defaults.items(): - if v in dependencies[k]: - del(dependencies[k]) - else: - # Only evaluate dependencies that don't have defaults values - dependencies = [d for d in dependencies if d not in - defaults.keys()] - - dcopy = None - if self._original_document: - dcopy = copy.copy(document) - dcopy.update(self._original_document) - return super(Validator, self)._validate_dependencies(dcopy or document, - dependencies, - field, - break_on_error) - - def _validate_type_media(self, field, value): - """ Enables validation for `media` data type. - - :param field: field name. - :param value: field value. - - .. versionadded:: 0.3 - """ - if not isinstance(value, FileStorage): - self._error(field, "file was expected, got '%s' instead." % value) + def _validate_type_dbref(self, value): + if isinstance(value, DBRef): + return True - def _validate_type_point(self, field, value): - """ Enables validation for `point` data type. + def _validate_type_media(self, value): + if isinstance(value, FileStorage): + return True - :param field: field name. - :param value: field value. - """ + def _validate_type_point(self, value): try: Point(value) - except TypeError as e: - self._error(field, "Point not correct %s: %s" % (value, e)) - - def _validate_type_linestring(self, field, value): - """ Enables validation for `linestring` data type. + return True + except TypeError: + pass - :param field: field name. - :param value: field value. 
- """ + def _validate_type_linestring(self, value): try: LineString(value) + return True except TypeError: - self._error(field, "LineString not correct %s " % value) + pass - def _validate_type_polygon(self, field, value): - """ Enables validation for `polygon` data type. - - :param field: field name. - :param value: field value. - """ + def _validate_type_polygon(self, value): try: Polygon(value) + return True except TypeError: - self._error(field, "LineString not correct %s " % value) - - def _validate_type_multipoint(self, field, value): - """ Enables validation for `multipoint` data type. + pass - :param field: field name. - :param value: field value. - """ + def _validate_type_multipoint(self, value): try: MultiPoint(value) + return True except TypeError: - self._error(field, "MultiPoint not correct" % value) + pass - def _validate_type_multilinestring(self, field, value): - """ Enables validation for `multilinestring`data type. - - :param field: field name. - :param value: field value. - """ + def _validate_type_multilinestring(self, value): try: MultiLineString(value) + return True except TypeError: - self._error(field, "MultiLineString not correct" % value) - - def _validate_type_multipolygon(self, field, value): - """ Enables validation for `multipolygon` data type. + pass - :param field: field name. - :param value: field value. - """ + def _validate_type_multipolygon(self, value): try: MultiPolygon(value) + return True except TypeError: - self._error(field, "MultiPolygon not correct" % value) + pass - def _validate_type_geometrycollection(self, field, value): - """ Enables validation for `geometrycollection`data type - - :param field: field name. 
- :param value: field nvalue - """ + def _validate_type_geometrycollection(self, value): try: GeometryCollection(value) + return True except TypeError: - self._error(field, "GeometryCollection not correct" % value) - - def _error(self, field, _error): - """ Change the default behaviour so that, if VALIDATION_ERROR_AS_LIST - is enabled, single validation errors are returned as a list. See #536. - - :param field: field name - :param _error: field error(s) - - .. versionadded:: 0.6 - """ - super(Validator, self)._error(field, _error) - if config.VALIDATION_ERROR_AS_LIST: - err = self._errors[field] - if not isinstance(err, list): - self._errors[field] = [err] + pass diff --git a/eve/methods/patch.py b/eve/methods/patch.py index 9c20ff75b..9287c232b 100644 --- a/eve/methods/patch.py +++ b/eve/methods/patch.py @@ -16,7 +16,7 @@ from datetime import datetime from eve.utils import config, debug_error_message, parse_request from eve.auth import requires_auth -from eve.validation import ValidationError +from eve.validation import DocumentError from eve.methods.common import get_document, parse, payload as payload_, \ ratelimit, pre_event, store_media_files, resolve_embedded_fields, \ build_response_document, marshal_write_response, resolve_document_etag, \ @@ -138,7 +138,7 @@ def patch_internal(resource, payload=None, concurrency_check=False, resource_def = app.config['DOMAIN'][resource] schema = resource_def['schema'] - validator = app.validator(schema, resource) + validator = app.validator(schema, resource=resource) object_id = original[resource_def['id_field']] last_modified = None @@ -228,7 +228,7 @@ def patch_internal(resource, payload=None, concurrency_check=False, etag = response[config.ETAG] else: issues = validator.errors - except ValidationError as e: + except DocumentError as e: # TODO should probably log the error and abort 400 instead (when we # got logging) issues['validator exception'] = str(e) diff --git a/eve/methods/post.py b/eve/methods/post.py index 
7e2503299..075706936 100644 --- a/eve/methods/post.py +++ b/eve/methods/post.py @@ -15,8 +15,7 @@ from flask import current_app as app, abort from eve.utils import config, parse_request, debug_error_message from eve.auth import requires_auth -from eve.defaults import resolve_default_values -from eve.validation import ValidationError +from eve.validation import DocumentError from eve.methods.common import parse, payload, ratelimit, \ pre_event, store_media_files, resolve_user_restricted_access, \ resolve_embedded_fields, build_response_document, marshal_write_response, \ @@ -148,7 +147,8 @@ def post_internal(resource, payl=None, skip_validation=False): date_utc = datetime.utcnow().replace(microsecond=0) resource_def = app.config['DOMAIN'][resource] schema = resource_def['schema'] - validator = None if skip_validation else app.validator(schema, resource) + validator = None if skip_validation \ + else app.validator(schema, resource=resource) documents = [] results = [] failures = 0 @@ -201,13 +201,12 @@ def post_internal(resource, payl=None, skip_validation=False): document[config.DELETED] = False resolve_user_restricted_access(document, resource) - resolve_default_values(document, resource_def['defaults']) store_media_files(document, resource) resolve_document_version(document, resource, 'POST') else: # validation errors added to list of document issues doc_issues = validator.errors - except ValidationError as e: + except DocumentError as e: doc_issues['validation exception'] = str(e) except Exception as e: # most likely a problem with the incoming payload, report back to diff --git a/eve/methods/put.py b/eve/methods/put.py index ef0347eaa..141cd5ee8 100644 --- a/eve/methods/put.py +++ b/eve/methods/put.py @@ -15,14 +15,13 @@ from werkzeug import exceptions from eve.auth import requires_auth -from eve.defaults import resolve_default_values from eve.methods.common import get_document, parse, payload as payload_, \ ratelimit, pre_event, store_media_files, 
resolve_user_restricted_access, \ resolve_embedded_fields, build_response_document, marshal_write_response, \ resolve_sub_resource_path, resolve_document_etag, oplog_push from eve.methods.post import post_internal from eve.utils import config, debug_error_message, parse_request -from eve.validation import ValidationError +from eve.validation import DocumentError from eve.versioning import resolve_document_version, \ insert_versioning_documents, late_versioning_catch @@ -109,7 +108,7 @@ def put_internal(resource, payload=None, concurrency_check=False, """ resource_def = app.config['DOMAIN'][resource] schema = resource_def['schema'] - validator = app.validator(schema, resource) + validator = app.validator(schema, resource=resource) if payload is None: payload = payload_() @@ -172,7 +171,6 @@ def put_internal(resource, payload=None, concurrency_check=False, document[resource_def['id_field']] = object_id resolve_user_restricted_access(document, resource) - resolve_default_values(document, resource_def['defaults']) store_media_files(document, resource, original) resolve_document_version(document, resource, 'PUT', original) @@ -208,7 +206,7 @@ def put_internal(resource, payload=None, concurrency_check=False, etag = response[config.ETAG] else: issues = validator.errors - except ValidationError as e: + except DocumentError as e: # TODO should probably log the error and abort 400 instead (when we # got logging) issues['validator exception'] = str(e) diff --git a/eve/tests/config.py b/eve/tests/config.py index 07a69b8ba..3819d8a93 100644 --- a/eve/tests/config.py +++ b/eve/tests/config.py @@ -258,8 +258,6 @@ def _test_defaults_for_resource(self, resource): self.app.config['AUTH_FIELD']) self.assertEqual(settings['allow_unknown'], self.app.config['ALLOW_UNKNOWN']) - self.assertEqual(settings['transparent_schema_rules'], - self.app.config['TRANSPARENT_SCHEMA_RULES']) self.assertEqual(settings['extra_response_fields'], self.app.config['EXTRA_RESPONSE_FIELDS']) 
self.assertEqual(settings['mongo_write_concern'], @@ -337,24 +335,6 @@ def assertValidateSchemaFailure(self, resource, schema, expected): else: self.fail("SchemaException expected but not raised.") - def test_schema_defaults(self): - self.domain.clear() - self.domain['resource'] = { - 'schema': { - 'title': { - 'type': 'string', - 'default': 'Mr.', - }, - 'price': { - 'type': 'integer', - 'default': 100 - }, - } - } - self.app.set_defaults() - settings = self.domain['resource'] - self.assertEqual({'title': 'Mr.', 'price': 100}, settings['defaults']) - def test_url_helpers(self): self.assertNotEqual(self.app.config.get('URLS'), None) self.assertEqual(type(self.app.config['URLS']), dict) diff --git a/eve/tests/default_values.py b/eve/tests/default_values.py deleted file mode 100644 index 4da7a97aa..000000000 --- a/eve/tests/default_values.py +++ /dev/null @@ -1,243 +0,0 @@ -import unittest - -from eve.defaults import build_defaults, resolve_default_values - - -class TestBuildDefaults(unittest.TestCase): - def test_schemaless_dict(self): - schema = { - "address": { - 'type': 'dict' - } - } - self.assertEqual({}, build_defaults(schema)) - - def test_simple(self): - schema = { - "name": {'type': 'string'}, - "email": {'type': 'string', 'default': "no@example.com"} - } - res = build_defaults(schema) - self.assertEqual({'email': 'no@example.com'}, res) - - def test_nested_one_level(self): - schema = { - "address": { - 'type': 'dict', - 'schema': { - 'street': {'type': 'string'}, - 'country': {'type': 'string', 'default': 'wonderland'} - } - } - } - res = build_defaults(schema) - self.assertEqual({'address': {'country': 'wonderland'}}, res) - - def test_empty_defaults_multiple_level(self): - schema = { - 'subscription': { - 'type': 'dict', - 'schema': { - 'type': {'type': 'string'}, - 'when': { - 'type': 'dict', - 'schema': { - 'timestamp': {'type': 'int'}, - 'repr': {'type': 'string'} - } - } - } - } - } - res = build_defaults(schema) - self.assertEqual({}, res) - - def 
test_nested_multilevel(self): - schema = { - "subscription": { - 'type': 'dict', - 'schema': { - 'type': {'type': 'string'}, - 'when': { - 'type': 'dict', - 'schema': { - 'timestamp': {'type': 'int', 'default': 0}, - 'repr': {'type': 'string', 'default': '0'} - } - } - } - } - } - res = build_defaults(schema) - self.assertEqual( - {'subscription': {'when': {'timestamp': 0, 'repr': '0'}}}, - res) - - def test_default_in_list_schema(self): - schema = { - "one": { - 'type': 'list', - 'schema': { - 'type': 'dict', - 'schema': { - 'title': { - 'type': 'string', - 'default': 'M.' - } - } - } - }, - "two": { - 'type': 'list', - 'schema': { - 'type': 'dict', - 'schema': { - 'name': {'type': 'string'} - } - } - } - } - res = build_defaults(schema) - self.assertEqual({"one": [{'title': 'M.'}]}, res) - - def test_default_in_list_without_schema(self): - schema = { - "one": { - 'type': 'list', - 'schema': { - 'type': 'string', - 'default': 'item' - } - } - } - res = build_defaults(schema) - self.assertEqual({"one": ['item']}, res) - - def test_lists_of_lists_with_default(self): - schema = { - 'twisting': { - 'type': 'list', # list of groups - 'required': True, - 'schema': { - 'type': 'list', # list of signals (in one group) - 'schema': { - 'type': 'string', - 'default': 'listoflist', - } - } - } - } - res = build_defaults(schema) - self.assertEqual({'twisting': [['listoflist']]}, res) - - def test_lists_of_lists_without_default(self): - schema = { - 'twisting': { - 'type': 'list', # list of groups - 'required': True, - 'schema': { - 'type': 'list', # list of signals (in one group) - 'schema': { - 'type': 'ObjectId', - 'required': True - } - } - } - } - res = build_defaults(schema) - self.assertEqual({}, res) - - def test_lists_of_lists_with_a_dict(self): - schema = { - 'twisting': { - 'type': 'list', # list of groups - 'required': True, - 'schema': { - 'type': 'list', # list of signals (in one group) - 'schema': { - 'type': 'dict', - 'schema': { - 'name': { - 'type': 'string', 
- 'default': 'me' - } - }, - } - } - } - } - res = build_defaults(schema) - self.assertEqual({'twisting': [[{'name': 'me'}]]}, res) - - -class TestResolveDefaultValues(unittest.TestCase): - def test_one_level(self): - document = {'name': 'john'} - defaults = {'email': 'noemail'} - resolve_default_values(document, defaults) - self.assertEqual({'name': 'john', 'email': 'noemail'}, document) - - def test_multilevel(self): - document = {'name': 'myname', 'one': {'hey': 'jude'}} - defaults = {'one': {'two': {'three': 'banana'}}} - resolve_default_values(document, defaults) - expected = { - 'name': 'myname', - 'one': { - 'hey': 'jude', - 'two': {'three': 'banana'} - } - } - self.assertEqual(expected, document) - - def test_value_instead_of_dict(self): - document = {'name': 'john'} - defaults = {'name': {'first': 'john'}} - resolve_default_values(document, defaults) - self.assertEqual(document, defaults) - - def test_lists(self): - document = {"one": [{"name": "john"}, {}]} - defaults = {"one": [{"title": "M."}]} - resolve_default_values(document, defaults) - expected = {"one": [ - {"name": "john", "title": "M."}, - {"title": "M."}]} - self.assertEqual(expected, document) - - def test_list_of_list_single_value(self): - document = {'one': [[], []]} - defaults = {'one': [['listoflist']]} - resolve_default_values(document, defaults) - # This functionality is not supported, no change in the document - expected = {'one': [[], []]} - assert expected == document - - def test_list_empty_list_as_default(self): - # test that a default value of [] for a list does not causes IndexError - # (#417). 
- document = {'a': ['b']} - defaults = {'a': []} - resolve_default_values(document, defaults) - expected = {'a': ['b']} - assert expected == document - - def test_list_of_strings_as_default(self): - document = {} - defaults = {'a': ['b']} - resolve_default_values(document, defaults) - expected = {'a': ['b']} - assert expected == document - # overwrite defaults - document = {'a': ['c', 'd']} - defaults = {'a': ['b']} - resolve_default_values(document, defaults) - expected = {'a': ['c', 'd']} - assert expected == document - - def test_list_of_list_dict_value(self): - document = {'one': [[{}], [{}]]} - defaults = {'one': [[{'name': 'banana'}]]} - resolve_default_values(document, defaults) - expected = {'one': [[{'name': 'banana'}], [{'name': 'banana'}]]} - assert expected == document diff --git a/eve/tests/endpoints.py b/eve/tests/endpoints.py index bd94effab..f003a6c7b 100644 --- a/eve/tests/endpoints.py +++ b/eve/tests/endpoints.py @@ -45,12 +45,12 @@ class UUIDValidator(Validator): """ Extends the base mongo validator adding support for the uuid data-type """ - def _validate_type_uuid(self, field, value): + def _validate_type_uuid(self, value): try: UUID(value) + return True except ValueError: - self._error("value '%s' for field '%s' cannot be converted to a " - "UUID" % (value, field)) + pass class TestCustomConverters(TestMinimal): diff --git a/eve/tests/io/media.py b/eve/tests/io/media.py index 1380bb2fa..b67e13456 100644 --- a/eve/tests/io/media.py +++ b/eve/tests/io/media.py @@ -48,7 +48,7 @@ def test_gridfs_media_storage_post(self): self.assertEqual(STATUS_ERR, r[STATUS]) # validates media fields - self.assertTrue('file was expected' in r[ISSUES]['media']) + self.assertTrue('must be of media type' in r[ISSUES]['media']) # also validates ordinary fields self.assertTrue('required' in r[ISSUES][self.test_field]) @@ -80,7 +80,7 @@ def test_gridfs_media_storage_post_excluded_file_in_result(self): self.assertEqual(STATUS_ERR, r[STATUS]) # validates media fields - 
self.assertTrue('file was expected' in r[ISSUES]['media']) + self.assertTrue('must be of media type' in r[ISSUES]['media']) # also validates ordinary fields self.assertTrue('required' in r[ISSUES][self.test_field]) diff --git a/eve/tests/io/mongo.py b/eve/tests/io/mongo.py index c5e17cead..596970af4 100644 --- a/eve/tests/io/mongo.py +++ b/eve/tests/io/mongo.py @@ -100,7 +100,7 @@ def test_objectid_fail(self): v = Validator(schema, None) self.assertFalse(v.validate(doc)) self.assertTrue('id' in v.errors) - self.assertTrue('ObjectId' in v.errors['id']) + self.assertTrue('objectid' in v.errors['id']) def test_objectid_success(self): schema = {'id': {'type': 'objectid'}} @@ -114,7 +114,7 @@ def test_dbref_fail(self): v = Validator(schema, None) self.assertFalse(v.validate(doc)) self.assertTrue('id' in v.errors) - self.assertTrue('DBRef' in v.errors['id']) + self.assertTrue('dbref' in v.errors['id']) def test_dbref_success(self): schema = {'id': {'type': 'dbref'}} @@ -123,31 +123,17 @@ def test_dbref_success(self): v = Validator(schema, None) self.assertTrue(v.validate(doc)) - def test_transparent_rules(self): - schema = {'a_field': {'type': 'string'}} - v = Validator(schema) - self.assertFalse(v.transparent_schema_rules) - def test_reject_invalid_schema(self): schema = {'a_field': {'foo': 'bar'}} self.assertRaises(SchemaError, lambda: Validator(schema)) - def test_enable_transparent_rules(self): - schema = {'a_field': {'type': 'string'}} - v = Validator(schema, transparent_schema_rules=True) - self.assertTrue(v.transparent_schema_rules) - - def test_transparent_rules_accept_invalid_schema(self): - schema = {'a_field': {'foo': 'bar'}} - Validator(schema, transparent_schema_rules=True) - def test_geojson_not_compilant(self): schema = {'location': {'type': 'point'}} doc = {'location': [10.0, 123.0]} v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('location' in v.errors) - self.assertTrue('Point' in v.errors['location']) + self.assertTrue('point' 
in v.errors['location']) def test_geometry_not_compilant(self): schema = {'location': {'type': 'point'}} @@ -155,7 +141,7 @@ def test_geometry_not_compilant(self): v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('location' in v.errors) - self.assertTrue('Point' in v.errors['location']) + self.assertTrue('point' in v.errors['location']) def test_geometrycollection_not_compilant(self): schema = {'location': {'type': 'geometrycollection'}} @@ -164,7 +150,7 @@ def test_geometrycollection_not_compilant(self): v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('location' in v.errors) - self.assertTrue('GeometryCollection' in v.errors['location']) + self.assertTrue('geometrycollection' in v.errors['location']) def test_point_success(self): schema = {'location': {'type': 'point'}} @@ -178,7 +164,7 @@ def test_point_fail(self): v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('location' in v.errors) - self.assertTrue('Point' in v.errors['location']) + self.assertTrue('point' in v.errors['location']) def test_point_integer_success(self): schema = {'location': {'type': 'point'}} @@ -201,7 +187,7 @@ def test_linestring_fail(self): v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('location' in v.errors) - self.assertTrue('LineString' in v.errors['location']) + self.assertTrue('linestring' in v.errors['location']) def test_polygon_success(self): schema = {'location': {'type': 'polygon'}} @@ -223,7 +209,7 @@ def test_polygon_fail(self): v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('location' in v.errors) - self.assertTrue('Polygon' in v.errors['location']) + self.assertTrue('polygon' in v.errors['location']) def test_multipoint_success(self): schema = {'location': {'type': 'multipoint'}} @@ -288,7 +274,7 @@ def test_geometrycollection_fail(self): v = Validator(schema) self.assertFalse(v.validate(doc)) self.assertTrue('locations' in v.errors) - 
self.assertTrue('GeometryCollection' in v.errors['locations']) + self.assertTrue('geometrycollection' in v.errors['locations']) def test_dependencies_with_defaults(self): schema = { diff --git a/eve/tests/methods/patch.py b/eve/tests/methods/patch.py index 1ff5bc957..f8be46b5a 100644 --- a/eve/tests/methods/patch.py +++ b/eve/tests/methods/patch.py @@ -181,20 +181,28 @@ def test_patch_null_objectid(self): db_value = self.compare_patch_with_get(field, r) self.assertEqual(db_value, test_value) - def test_patch_defaults(self): + def test_patch_missing_default(self): + """ PATCH an object which is missing a field with a default value. + + This should result in setting the field to its default value, even if + the field is not provided in the PATCH's payload. """ field = "ref" test_value = "1234567890123456789012345" changes = {field: test_value} r = self.perform_patch(changes) - self.assertRaises(KeyError, self.compare_patch_with_get, 'title', r) + self.assertEqual(self.compare_patch_with_get('title', r), 'Mr.') + + def test_patch_missing_default_with_post_override(self): + """ PATCH an object which is missing a field with a default value. - def test_patch_defaults_with_post_override(self): + This should result in setting the field to its default value, even if + the field is not provided in the PATCH's payload. 
""" field = "ref" test_value = "1234567890123456789012345" r = self.perform_patch_with_post_override(field, test_value) self.assert200(r.status_code) - self.assertRaises(KeyError, self.compare_patch_with_get, 'title', - json.loads(r.get_data())) + title = self.compare_patch_with_get('title', json.loads(r.get_data())) + self.assertEqual(title, 'Mr.') def test_patch_multiple_fields(self): fields = ['ref', 'prog', 'role'] @@ -544,6 +552,7 @@ def test_patch_nested_document_nullable_missing(self): 'name': {'type': 'string'}, }, 'default': None, + 'nullable': True }, 'other': { 'type': 'dict', @@ -586,7 +595,6 @@ def test_patch_dependent_field_on_origin_document(self): # this will fail as dependent field is missing even in the # document we are trying to update. del(self.domain['contacts']['schema']['dependency_field1']['default']) - del(self.domain['contacts']['defaults']['dependency_field1']) changes = {'dependency_field2': 'value'} r, status = self.patch(self.item_id_url, data=changes, headers=[('If-Match', self.item_etag)]) diff --git a/eve/tests/methods/post.py b/eve/tests/methods/post.py index 2e4070231..bb509cd22 100644 --- a/eve/tests/methods/post.py +++ b/eve/tests/methods/post.py @@ -201,7 +201,7 @@ def test_multi_post_invalid(self): self.assertValidationError(results[1], {'ref': 'required'}) self.assertValidationError(results[3], {'ref': 'unique'}) - self.assertValidationError(results[4], {'tid': 'ObjectId'}) + self.assertValidationError(results[4], {'tid': 'objectid'}) id_field = self.domain[self.known_resource]['id_field'] self.assertTrue(id_field not in results[0]) @@ -675,21 +675,21 @@ def test_post_valueschema_dict(self): data={"valueschema_dict": {"k1": 1}}) self.assert201(status) - def test_post_propertyschema_dict(self): + def test_post_keyschema_dict(self): del(self.domain['contacts']['schema']['ref']['required']) r, status = self.post(self.known_resource_url, - data={"propertyschema_dict": {"aaa": 1}}) + data={"keyschema_dict": {"aaa": 1}}) 
self.assert201(status) r, status = self.post(self.known_resource_url, - data={"propertyschema_dict": {"AAA": "1"}}) + data={"keyschema_dict": {"AAA": "1"}}) self.assertValidationErrorStatus(status) issues = r[ISSUES] - self.assertTrue('propertyschema_dict' in issues) - self.assertEqual(issues['propertyschema_dict'], - 'propertyschema_dict') + self.assertTrue('keyschema_dict' in issues) + self.assertEqual(issues['keyschema_dict'], + {'AAA': "value does not match regex '[a-z]+'"}) def test_post_internal(self): # test that post_internal is available and working properly. diff --git a/eve/tests/test_settings.py b/eve/tests/test_settings.py index 425b40355..9dd2363bc 100644 --- a/eve/tests/test_settings.py +++ b/eve/tests/test_settings.py @@ -130,9 +130,9 @@ 'key1': { 'type': 'string', }, - 'propertyschema_dict': { + 'keyschema_dict': { 'type': 'dict', - 'propertyschema': {'type': 'string', 'regex': '[a-z]+'} + 'keyschema': {'type': 'string', 'regex': '[a-z]+'} }, 'valueschema_dict': { 'type': 'dict', diff --git a/eve/tests/versioning.py b/eve/tests/versioning.py index 9e781a551..fbeaaacf1 100644 --- a/eve/tests/versioning.py +++ b/eve/tests/versioning.py @@ -900,8 +900,7 @@ def test_referential_integrity(self): r, status = self.post('/invoices/', data=data) self.assertValidationErrorStatus(status) self.assertValidationError( - r, {'person': { - value_field: "value 'bad' cannot be converted to a ObjectId"}}) + r, {'person': {value_field: "must be of objectid type"}}) # unknown id data = {"person": { diff --git a/eve/validation.py b/eve/validation.py index 85a9b8f59..09279fee7 100644 --- a/eve/validation.py +++ b/eve/validation.py @@ -12,5 +12,144 @@ :license: BSD, see LICENSE for more details. 
""" -# flake8: noqa -from cerberus import ValidationError, SchemaError +import copy +import cerberus +import cerberus.errors +from cerberus import DocumentError, SchemaError # flake8: noqa + +from eve.utils import config + + +class Validator(cerberus.Validator): + + def __init__(self, *args, **kwargs): + kwargs['error_handler'] = DefaultErrorHandler + + resource = kwargs.get('resource', None) + if resource: + resource_def = config.DOMAIN[resource] + kwargs['allow_unknown'] = resource_def['allow_unknown'] + super(Validator, self).__init__(*args, **kwargs) + + def validate_update(self, document, document_id, persisted_document=None): + """ Validate method to be invoked when performing an update, not an + insert. + + :param document: the document to be validated. + :param document_id: the unique id of the document. + :param persisted_document: the persisted document to be updated. + """ + self.document_id = document_id + self.persisted_document = persisted_document + return super(Validator, self).validate(document, update=True) + + def validate_replace(self, document, document_id, persisted_document=None): + """ Validation method to be invoked when performing a document + replacement. This differs from :func:`validation_update` since in this + case we want to perform a full :func:`validate` (the new document is to + be considered a new insertion and required fields needs validation). + However, like with validate_update, we also want the current document_id + not to be checked when validating 'unique' values. + + :param document: the document to be validated. + :param document_id: the unique id of the document. + :param persisted_document: the persisted document to be updated. + + .. 
versionadded:: 0.1.0 + """ + self.document_id = document_id + self.persisted_document = persisted_document + return super(Validator, self).validate(document) + + def _normalize_default(self, mapping, schema, field): + """ {'nullable': True} """ + if not self.persisted_document or \ + field not in self.persisted_document: + super(Validator, self)._normalize_default(mapping, schema, field) + + def _normalize_default_setter(self, mapping, schema, field): + """ {'oneof': [ + {'type': 'callable'}, + {'type': 'string'} + ]} """ + if not self.persisted_document or \ + field not in self.persisted_document: + super(Validator, self)._normalize_default_setter(mapping, schema, + field) + + def _validate_dependencies(self, dependencies, field, value): + """ {'type': ['dict', 'hashable', 'hashables']} """ + persisted = self._filter_persisted_fields_not_in_document(dependencies) + if persisted: + dcopy = copy.copy(self.document) + for field in persisted: + dcopy[field] = self.persisted_document[field] + validator = self._get_child_validator() + validator.validate(dcopy, update=self.update) + self._error(validator._errors) + else: + super(Validator, self)._validate_dependencies(dependencies, field, + value) + + def _filter_persisted_fields_not_in_document(self, fields): + def persisted_but_not_in_document(field): + return field not in self.document and \ + self.persisted_document and \ + field in self.persisted_document + return list(filter(persisted_but_not_in_document, fields)) + + def _validate_readonly(self, read_only, field, value): + """ {'type': 'boolean'} """ + persisted_value = self.persisted_document.get(field) \ + if self.persisted_document else None + if value != persisted_value: + super(Validator, self)._validate_readonly(read_only, field, value) + + @property + def resource(self): + return self._config.get('resource', None) + + @resource.setter + def resource(self, value): + self._config['resource'] = value + + @property + def document_id(self): + return 
self._config.get('document_id', None) + + @document_id.setter + def document_id(self, value): + self._config['document_id'] = value + + @property + def persisted_document(self): + return self._config.get('persisted_document', None) + + @persisted_document.setter + def persisted_document(self, value): + self._config['persisted_document'] = value + + +class DefaultErrorHandler(cerberus.errors.BasicErrorHandler): + """ Default Cerberus error handler for Eve. + + Since Cerberus 1.0, error messages for fields will always be returned as + lists, even in the case of a single error. To maintain compatibility with + clients, this error handler will unpack single-element error lists unless + the config item VALIDATION_ERROR_AS_LIST is True. + """ + + @property + def pretty_tree(self): + pretty = super(DefaultErrorHandler, self).pretty_tree + if not config.VALIDATION_ERROR_AS_LIST: + self._unpack_single_element_lists(pretty) + return pretty + + def _unpack_single_element_lists(self, tree): + for field in tree: + error_list = tree[field] + if len(error_list) > 0 and isinstance(tree[field][-1], dict): + self._unpack_single_element_lists(tree[field][-1]) + if len(tree[field]) == 1: + tree[field] = tree[field][0] diff --git a/requirements.txt b/requirements.txt index a11ca110c..acef0f99b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Cerberus==0.9.2 +git+git://github.com/dkellner/cerberus.git@develop#egg=Cerberus-1.0.2.dev0 Events==0.2.1 Flask-PyMongo==0.4.1 Flask==0.10.1 diff --git a/setup.py b/setup.py index 986472408..1652cb0bb 100755 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ LONG_DESCRIPTION = f.read() install_requires = [ - 'cerberus>=0.9.2,<0.10', + 'cerberus>=1.0.2.dev0', 'events>=0.2.1,<0.3', 'simplejson>=3.3.0,<4.0', 'werkzeug>=0.9.4,<0.11.4', @@ -37,6 +37,7 @@ platforms=["any"], packages=find_packages(), test_suite="eve.tests", + dependency_links=['https://github.com/dkellner/cerberus/tarball/develop#egg=cerberus-1.0.2.dev0'],
install_requires=install_requires, tests_require=['redis', 'testfixtures'], classifiers=[ diff --git a/tox.ini b/tox.ini index 406fe1661..5c0dff65a 100644 --- a/tox.ini +++ b/tox.ini @@ -3,6 +3,7 @@ envlist=py26,py27,py33,py34,pypy,flake8 [testenv] commands=python setup.py test {posargs} +install_command=pip install --process-dependency-links {packages} [testenv:flake8] deps=flake8