diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3483a83..4d4bf9d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -15,7 +15,7 @@ on: # A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: tests: - name: Python ${{ matrix.python-version }} + name: Python ${{ matrix.python-version }}-${{ matrix.pymongo-version }} runs-on: ubuntu-latest strategy: @@ -28,6 +28,9 @@ jobs: - '3.11' - '3.12' - pypy3.9 + pymongo-version: + - pymongo3 + - pymongo4 services: mongodb: @@ -39,7 +42,7 @@ jobs: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -52,12 +55,14 @@ jobs: - name: Run tox targets for ${{ matrix.python-version }} # sed is used to remove the '.' so '3.7' becomes '37' for tox (and pypy3 doesn't become pypypy3) # and drop the '-dev' from any prerelease version - run: tox --skip-missing-interpreters false -e py`echo ${{ matrix.python-version }} | sed s/\\\.// | sed s/pypy/py/ | sed s/-dev//` + run: | + export PY_VER=`echo ${{ matrix.python-version }} | sed s/\\\.// | sed s/pypy/py/ | sed s/-dev//` + tox --skip-missing-interpreters false -e "py$PY_VER-${{ matrix.pymongo-version }}" - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: - fail_ci_if_error: true + fail_ci_if_error: false flags: tests-${{ matrix.python-version }} name: codecov-umbrella verbose: true diff --git a/docs/news.rst b/docs/news.rst index 36a7048..c147200 100644 --- a/docs/news.rst +++ b/docs/news.rst @@ -3,6 +3,27 @@ Ming News / Release Notes The latest releases support PyMongo 3. The older 0.5.x releases support PyMongo 2 and Python 2. 
+Pre-Release +--------------------- +* Prepare for PyMongo 4.0 support +* Replace ``find_and_modify()`` session methods with ``find_one_and_update()``, ``find_one_and_replace()``, + and ``find_one_and_delete()`` to more closely match pymongo4's API +* Remove ``group()`` session methods as they are unsupported in pymongo4. Use the aggregation pipeline. +* Remove ``map_reduce()`` and ``inline_map_reduce()`` session methods as they are unsupported in pymongo4. + Use the aggregation pipeline. +* Several operations now return their mongo-native result objects; UpdateResult, InsertOneResult, etc. +* MIM: Replace ``mim.Collection.insert()`` with ``insert_one()`` and ``insert_many()`` to match pymongo4 +* MIM: Remove deprecated ``manipulate`` and ``safe`` args from pymongo's ``insert_one`` and ``insert_many`` methods +* MIM: Replace ``mim.Collection.update()`` with ``update_one()`` and ``update_many()`` to match pymongo4 +* MIM: Replace ``mim.Collection.count()`` and ``mim.Cursor.count()`` with + ``mim.Collection.estimated_document_count()`` and ``mim.Collection.count_documents()`` to match pymongo4 +* MIM: Replace ``mim.Collection.remove()`` with ``mim.Collection.delete_one()`` + and ``mim.Collection.delete_many()`` to match pymongo4 +* MIM: Rename ``collection_names()`` and ``database_names()`` to ``list_collection_names()`` + and ``list_database_names()`` +* MIM: Remove ``mim.Collection.map_reduce()`` and ``mim.Collection.inline_map_reduce()`` to match pymongo4 +* MIM: Replace ``ensure_index()`` with ``create_index()`` to match pymongo4 + 0.13.0 (Mar 16, 2023) --------------------- * remove Python 3.6 support
page = pages.find_one() page['author'] = 'Rick Copeland' -pages.save(page) +pages.replace_one(dict(_id=page['_id']), page) pages.find_one() diff --git a/docs/src/ming_odm_migrations.py b/docs/src/ming_odm_migrations.py index d4c2200..ad79ab2 100644 --- a/docs/src/ming_odm_migrations.py +++ b/docs/src/ming_odm_migrations.py @@ -109,7 +109,7 @@ def snippet1(): TAGS = ['foo', 'bar', 'snafu', 'mongodb'] # Insert the documents through PyMongo so that Ming is not involved - session.db.wiki_page.insert([ + session.db.wiki_page.insert_many([ dict(title='Page %s' % idx, text='Text of Page %s' %idx, tags=random.sample(TAGS, 2)) for idx in range(10) ]) @@ -134,7 +134,7 @@ def snippet5(): next(session.db.wiki_page.find()).get('metadata') def snippet6(): - session.db.mymodel.insert(dict(name='desrever')) + session.db.mymodel.insert_one(dict(name='desrever')) session.db.mymodel.find_one() # Apply migration to version 1 and then to version 2 diff --git a/docs/src/ming_odm_schemas.py b/docs/src/ming_odm_schemas.py index ca89030..7ce116f 100644 --- a/docs/src/ming_odm_schemas.py +++ b/docs/src/ming_odm_schemas.py @@ -74,8 +74,8 @@ def snippet1_2(): def snippet1_3(): Contact.query.remove({}) - session.db.contact.insert(dict(name='Invalid Contact', - email='this-is-invalid')) + session.db.contact.insert_one(dict(name='Invalid Contact', + email='this-is-invalid')) try: c1 = Contact.query.find().first() diff --git a/docs/src/ming_odm_tutorial.py b/docs/src/ming_odm_tutorial.py index d80546b..f4a3dc8 100644 --- a/docs/src/ming_odm_tutorial.py +++ b/docs/src/ming_odm_tutorial.py @@ -118,9 +118,9 @@ def snippet5_1(): session.flush() def snippet5_3(): - WikiPage.query.find_and_modify({'title': 'MyFirstPage'}, - update={'$set': {'text': 'This is my first page'}}, - new=True) + WikiPage.query.find_one_and_update({'title': 'MyFirstPage'}, - update={'$set': {'text': 'This is my first page'}}, - upsert=True) def snippet5_4(): wp = WikiPage.query.get(title='MyFirstPage') diff --git 
a/docs/src/ming_welcome.py b/docs/src/ming_welcome.py index d5b5a70..54f9c32 100644 --- a/docs/src/ming_welcome.py +++ b/docs/src/ming_welcome.py @@ -36,8 +36,8 @@ class __mongometa__: def snippet1(): - session.db.wiki_page.insert({'title': 'FirstPage', - 'text': 'This is my first page'}) + session.db.wiki_page.insert_one({'title': 'FirstPage', + 'text': 'This is my first page'}) session.db.wiki_page.find_one({'title': 'FirstPage'}) diff --git a/docs/userguide.rst b/docs/userguide.rst index a3d6830..5df8967 100644 --- a/docs/userguide.rst +++ b/docs/userguide.rst @@ -288,7 +288,7 @@ Querying Objects Once we have a `WikiPage` in the database, we can retrieve it using the `.query` attribute. The query attribute is a proxy to the Session query features which expose three methods that make possible to query objects :meth:`._ClassQuery.get`, -:meth:`.ODMSession.find` and :meth:`.ODMSession.find_and_modify`: +:meth:`.ODMSession.find` and :meth:`.ODMSession.find_one_and_update`: .. run-pysnippet:: ming_odm_tutorial snippet2 @@ -340,7 +340,7 @@ will track that the object needs to be updated: :skip: 1 :emphasize-lines: 17 -Another option to edit an object is to actually rely on :meth:`.ODMSession.find_and_modify` +Another option to edit an object is to actually rely on :meth:`.ODMSession.find_one_and_update` method which will query the object and update it atomically: .. run-pysnippet:: ming_odm_tutorial snippet5_3 @@ -349,7 +349,7 @@ This is often used to increment counters or acquire global locks in mongodb .. note:: - ``find_and_modify`` always refreshes the object in the IdentityMap, so the object + ``find_one_and_update`` always refreshes the object in the IdentityMap, so the object in your IdentityMap will always get replaced with the newly retrieved value. Make sure you properly flushed any previous change to the object and use the ``new`` option to avoid retrieving a stale version of the object if you plan to modify it. 
diff --git a/ming/base.py b/ming/base.py index aa6229f..4c73c52 100644 --- a/ming/base.py +++ b/ming/base.py @@ -69,11 +69,12 @@ class Cursor: def __bool__(self): raise MingException('Cannot evaluate Cursor to a boolean') - def __init__(self, cls, cursor, allow_extra=True, strip_extra=True): + def __init__(self, cls, cursor, allow_extra=True, strip_extra=True, find_spec=None): self.cls = cls self.cursor = cursor self._allow_extra = allow_extra self._strip_extra = strip_extra + self.find_spec = find_spec def __iter__(self): return self @@ -89,7 +90,12 @@ def next(self): __next__ = next def count(self): - return self.cursor.count() + """ + This method, although deprecated by pymongo, is kept for backcompat with existing code. + It is inaccurate when used with a cursor that has been limited or skipped. However, + this behavior is consistent with previous pymongo (3.X) and mongo shell (4.X) behavior. + """ + return self.cursor.collection.count_documents(self.find_spec) def distinct(self, *args, **kwargs): return self.cursor.distinct(*args, **kwargs) diff --git a/ming/metadata.py b/ming/metadata.py index 0e4c8c9..9e6476e 100644 --- a/ming/metadata.py +++ b/ming/metadata.py @@ -221,8 +221,9 @@ class _ClassManager(metaclass=_CurriedProxyClass): _proxy_args=('cls',) _proxy_methods = ( 'get', 'find', 'find_by', 'remove', 'count', 'update_partial', - 'group', 'ensure_index', 'ensure_indexes', 'index_information', 'drop_indexes', - 'find_and_modify', 'aggregate', 'distinct', 'map_reduce', 'inline_map_reduce', + 'create_index', 'ensure_index', 'ensure_indexes', 'index_information', 'drop_indexes', + 'find_one_and_update', 'find_one_and_replace', 'find_one_and_delete', + 'aggregate', 'distinct', ) InstanceManagerClass=_InstanceManager @@ -358,7 +359,7 @@ def _ensure_indexes(self): try: with self._lock: for idx in self.manager.indexes: - collection.ensure_index(idx.index_spec, background=True, + collection.create_index(idx.index_spec, background=True, **idx.index_options) except 
(MongoGone, ConnectionFailure) as e: if e.args[0] == 'not master': diff --git a/ming/metadata.pyi b/ming/metadata.pyi index 90b6e64..181044e 100644 --- a/ming/metadata.pyi +++ b/ming/metadata.pyi @@ -1,6 +1,7 @@ from bson import ObjectId from datetime import datetime from typing import Generic, TypeVar, Any, Optional, overload, List, Dict, Type, Union, Mapping, type_check_only +from pymongo.results import UpdateResult, DeleteResult import bson from ming.base import Cursor @@ -178,7 +179,6 @@ class Field: M = TypeVar('M') MongoFilter = dict -ChangeResult = dict SaveResult = Union[ObjectId, Any] class _ClassManager(Generic[M]): # proxies these from Session @@ -190,28 +190,28 @@ class _ClassManager(Generic[M]): def find_by(self, filter: MongoFilter, *args, validate: bool = True, **kwargs) -> Cursor[M]: ... #@overload #def find_by(self, filter: MongoFilter, *args, validate: Literal[False], **kwargs) -> Generator[M]: ... - def remove(self, spec_or_id: Union[MongoFilter, ObjectId] = None, **kwargs) -> ChangeResult: ... + def remove(self, spec_or_id: Union[MongoFilter, ObjectId] = None, **kwargs) -> DeleteResult: ... def count(self) -> int: ... - def update_partial(self, filter: MongoFilter, fields: dict, **kwargs) -> ChangeResult: ... + def update_partial(self, filter: MongoFilter, fields: dict, **kwargs) -> UpdateResult: ... + def find_one_and_update(self, **kwargs) -> M: ... + def find_one_and_replace(self, **kwargs) -> M: ... + def find_one_and_delete(self, **kwargs) -> M: ... """ def group(self) -> int: ... def ensure_indexes(self) -> int: ... def index_information(self) -> int: ... def drop_indexes(self) -> int: ... - def find_and_modify(self) -> int: ... def aggregate(self) -> int: ... def distinct(self) -> int: ... - def map_reduce(self) -> int: ... - def inline_map_reduce(self) -> int: ... """ class _InstanceManager: # proxies these from Session def save(self, *args: str, **kwargs) -> SaveResult: ... def insert(self, **kwargs) -> SaveResult: ... 
- def upsert(self, spec_fields: List[str], **kwargs) -> ChangeResult: ... - def delete(self) -> ChangeResult: ... - def set(self, fields_values: Mapping[str, Any]) -> ChangeResult: ... + def upsert(self, spec_fields: List[str], **kwargs) -> UpdateResult: ... + def delete(self) -> DeleteResult: ... + def set(self, fields_values: Mapping[str, Any]) -> UpdateResult: ... def increase_field(self, **kwargs) -> None: ... diff --git a/ming/mim.py b/ming/mim.py index 98e631f..e2244af 100644 --- a/ming/mim.py +++ b/ming/mim.py @@ -1,3 +1,7 @@ +from __future__ import annotations + +import bson.codec_options + '''mim.py - Mongo In Memory - stripped-down version of mongo that is non-persistent and hopefully much, much faster ''' @@ -8,11 +12,13 @@ import uuid from itertools import chain import collections +import collections.abc import logging import warnings from datetime import datetime from hashlib import md5 from functools import cmp_to_key +from enum import Enum import pickle @@ -26,14 +32,19 @@ from ming.utils import LazyProperty import bson +from bson.binary import UuidRepresentation, Binary +from bson.codec_options import CodecOptions from bson.raw_bson import RawBSONDocument from pymongo import database, collection, ASCENDING, MongoClient, UpdateOne from pymongo.cursor import Cursor as PymongoCursor from pymongo.errors import InvalidOperation, OperationFailure, DuplicateKeyError -from pymongo.results import DeleteResult, UpdateResult, InsertManyResult, InsertOneResult +from pymongo.results import DeleteResult, UpdateResult, InsertManyResult, InsertOneResult, BulkWriteResult log = logging.getLogger(__name__) +UUID_REPRESENTATION = UuidRepresentation.PYTHON_LEGACY +UUID_REPRESENTATION_STR = 'pythonLegacy' + class PymongoCursorNoCleanup(PymongoCursor): def __del__(self): @@ -53,7 +64,8 @@ def __init__(self): self._databases = {} # Clone defaults from a MongoClient instance. 
- mongoclient = MongoClient() + mongoclient = MongoClient(uuidRepresentation=UUID_REPRESENTATION_STR) + self.options = mongoclient.options self.read_preference = mongoclient.read_preference self.write_concern = mongoclient.write_concern self.codec_options = mongoclient.codec_options @@ -83,7 +95,7 @@ def _get(self, name): db = self._databases[name] = Database(self, name) return db - def database_names(self): + def list_database_names(self): return self._databases.keys() def drop_database(self, name): @@ -162,7 +174,7 @@ def command(self, command, coll.insert(before) else: raise OperationFailure('No matching object found') - coll.update(command['query'], command['update']) + coll.update_many(command['query'], command['update']) if command.get('new', False) or upsert: return dict(value=coll.find_one(dict(_id=before['_id']))) return dict(value=before) @@ -319,7 +331,7 @@ def _get(self, name): def __repr__(self): return 'mim.Database(%s)' % self.name - def collection_names(self): + def list_collection_names(self): return self._collections.keys() def drop_collection(self, name): @@ -330,6 +342,12 @@ def clear(self): coll.clear() +class ModifyOperation(Enum): + UPDATE = 1 + REPLACE = 2 + DELETE = 3 + + class Collection(collection.Collection): def __init__(self, database, name): super().__init__(database, name) @@ -391,8 +409,8 @@ def find_one(self, filter_or_id=None, *args, **kwargs): return result return None - def __find_and_modify(self, query=None, update=None, fields=None, - upsert=False, remove=False, **kwargs): + def __find_and_modify(self, query=None, update=None, projection=None, + upsert=False, operation=ModifyOperation.UPDATE, **kwargs): if query is None: query = {} before = self.find_one(query, sort=kwargs.get('sort')) upserted = False @@ -400,58 +418,72 @@ def __find_and_modify(self, query=None, update=None, fields=None, upserted = True if upsert: result = self.__update(query, update, upsert=True) - query = {'_id': result['upserted']} + query = {'_id': 
result.upserted_id} else: return None before = self.find_one(query, sort=kwargs.get('sort')) - if remove: + if operation == ModifyOperation.DELETE: self.__remove({'_id': before['_id']}) - elif not upserted: - self.__update({'_id': before['_id']}, update) - - return_new = kwargs.get('new', False) - if return_new: - return self.find_one(dict(_id=before['_id']), fields) + else: + if operation == ModifyOperation.REPLACE: + if any(k.startswith('$') for k in update.keys()): + raise ValueError('replacement can not include $ operators') + # FIXME: shouldn't have to do this to mimic mongodb behavior. + # But this is all internal, so who cares? + update['_id'] = before['_id'] + self.__update({'_id': before['_id']}, update) + elif operation == ModifyOperation.UPDATE: + if not all(k.startswith('$') for k in update.keys()): + raise ValueError('update only works with $ operators') + if not upserted: + self.__update({'_id': before['_id']}, update) + + return_document = kwargs.pop('return_document', None) + return_new = kwargs.pop('new', None) + if return_new is not None: + warnings.warn('The kwarg new=True is now deprecated. 
Please use return_document=True instead.', DeprecationWarning, stacklevel=2) + if return_document is None: + return_document = return_new + + if return_document: + return self.find_one(dict(_id=before['_id']), projection) elif upserted: return None else: - return Projection(fields).apply(before) - - def find_and_modify(self, query=None, update=None, fields=None, - upsert=False, remove=False, **kwargs): - warnings.warn('find_and_modify is now deprecated, please use find_one_and_delete, ' - 'find_one_and_replace, find_one_and_update)', DeprecationWarning, stacklevel=2) - return self.__find_and_modify(query, update, fields, upsert, remove, **kwargs) + return Projection(projection).apply(before) def find_one_and_delete(self, filter, projection=None, sort=None, **kwargs): - return self.__find_and_modify(filter, fields=projection, remove=True, sort=sort, **kwargs) - - def find_one_and_replace(self, filter, replacement, projection=None, sort=None, - return_document=False, **kwargs): - # ReturnDocument.BEFORE -> False - # ReturnDocument.AFTER -> True - return self.__find_and_modify(filter, update=replacement, fields=projection, - sort=sort, new=return_document, **kwargs) - - def find_one_and_update(self, filter, update, projection=None, sort=None, - return_document=False, **kwargs): - # ReturnDocument.BEFORE -> False - # ReturnDocument.AFTER -> True - return self.__find_and_modify(filter, update=update, fields=projection, - sort=sort, new=return_document, **kwargs) - - def count(self, filter=None, **kwargs): - return self.find(filter, **kwargs).count() - - def __insert(self, doc_or_docs, manipulate=True, **kwargs): + return self.__find_and_modify(filter, projection=projection, operation=ModifyOperation.DELETE, sort=sort, **kwargs) + + def find_one_and_replace(self, filter, replacement, projection=None, sort=None, upsert=False, + return_document=None, **kwargs): + # pymongo.collection.ReturnDocument.BEFORE -> False + # pymongo.collection.ReturnDocument.AFTER -> True + 
return self.__find_and_modify(filter, update=replacement, projection=projection, + sort=sort, upsert=upsert, return_document=return_document, + operation=ModifyOperation.REPLACE, **kwargs) + + def find_one_and_update(self, filter, update, projection=None, sort=None, upsert=False, + return_document=None, **kwargs): + # pymongo.collection.ReturnDocument.BEFORE -> False + # pymongo.collection.ReturnDocument.AFTER -> True + return self.__find_and_modify(filter, update=update, projection=projection, + sort=sort, upsert=upsert, return_document=return_document, + operation=ModifyOperation.UPDATE, **kwargs) + + def estimated_document_count(self, **kwargs): + return self.find({}, **kwargs)._count() + + def count_documents(self, filter=None, **kwargs): + return self.find(filter, **kwargs)._count() + + def __insert(self, doc_or_docs, **kwargs) -> InsertOneResult | InsertManyResult: result = [] if not isinstance(doc_or_docs, list): doc_or_docs = [ doc_or_docs ] for doc in doc_or_docs: - if not manipulate: - doc = bcopy(doc) bson_safe(doc) _id = doc.get('_id', ()) if _id == (): @@ -463,55 +495,41 @@ def __insert(self, doc_or_docs, manipulate=True, **kwargs): continue self._index(doc) self._data[_id] = bcopy(doc) - return result - - def insert(self, doc_or_docs, manipulate=True, **kwargs): - warnings.warn('insert is now deprecated, please use insert_one or insert_many', DeprecationWarning, stacklevel=2) - return self.__insert(doc_or_docs, manipulate, **kwargs) - - def insert_one(self, document, session=None): - result = self.__insert(document) - if result: - result = result[0] - return InsertOneResult(result or None, True) - - def insert_many(self, documents, ordered=True, session=None): - result = self.__insert(documents) - return InsertManyResult(result, True) - - def save(self, doc, **kwargs): - warnings.warn('save is now deprecated, please use insert_one or replace_one', DeprecationWarning, stacklevel=2) - _id = doc.get('_id', ()) - if _id == (): - return self.__insert(doc) 
+ if len(result) > 1: + return InsertManyResult(result, True) else: - self.__update({'_id':_id}, doc, upsert=True) - return _id + return InsertOneResult(result[0] if result else None, True) + + def insert_one(self, document, session=None) -> InsertOneResult: + return self.__insert(document) + + def insert_many(self, documents, ordered=True, session=None) -> InsertManyResult: + return self.__insert(documents) def replace_one(self, filter, replacement, upsert=False): - return self._update(filter, replacement, upsert) + return self.__update(filter, replacement, upsert) - def __update(self, spec, updates, upsert=False, multi=False): + def __update(self, spec, updates, upsert=False, multi=False) -> UpdateResult: bson_safe(spec) bson_safe(updates) - result = dict( - connectionId=None, - updatedExisting=False, - err=None, - ok=1.0, + + # https://pymongo.readthedocs.io/en/stable/api/pymongo/results.html#pymongo.results.UpdateResult + # TODO: SF-9544 - likely needs update in pymongo4 + raw_result = dict( n=0, - nModified=0 + nModified=0, + upserted=None, ) for doc, mspec in self._find(spec): self._deindex(doc) mspec.update(updates) self._index(doc) - result['n'] += 1 - result['nModified'] += 1 - if not multi: break - if result['n']: - result['updatedExisting'] = True - return result + raw_result['n'] += 1 + raw_result['nModified'] += 1 + if not multi: + break + if raw_result['n']: + return UpdateResult(raw_result, True) if upsert: doc = dict(spec) MatchDoc(doc).update(updates, upserted=upsert) @@ -522,25 +540,22 @@ def __update(self, spec, updates, upsert=False, multi=False): raise DuplicateKeyError('duplicate ID on upsert') self._index(doc) self._data[_id] = bcopy(doc) - result['upserted'] = _id - return result + raw_result['upserted'] = _id + return UpdateResult(raw_result, True) else: - return result - - def update(self, spec, updates, upsert=False, multi=False): - warnings.warn('update is now deprecated, please use update_many or update_one', DeprecationWarning, stacklevel=2) - return 
self.__update(spec, updates, upsert, multi) + return UpdateResult(raw_result, True) def update_many(self, filter, update, upsert=False): - result = self.__update(filter, update, upsert, multi=True) - return UpdateResult(result, True) + return self.__update(filter, update, upsert, multi=True) def update_one(self, filter, update, upsert=False): - result = self.__update(filter, update, upsert, multi=False) - return UpdateResult(result, True) + return self.__update(filter, update, upsert, multi=False) def __remove(self, spec=None, **kwargs): - result = dict(n=0) + # TODO: SF-9544 - likely needs update in pymongo4 + result = dict( + n=0, + ) multi = kwargs.get('multi', True) if spec is None: spec = {} new_data = {} @@ -551,26 +566,20 @@ def __remove(self, spec=None, **kwargs): else: new_data[id] = doc self._data = new_data - return result - - def remove(self, spec=None, **kwargs): - warnings.warn('remove is now deprecated, please use delete_many or delete_one', DeprecationWarning, stacklevel=2) - self.__remove(spec, **kwargs) + return DeleteResult(result, True) def delete_one(self, filter, session=None): - res = self.__remove(filter, multi=False) - return DeleteResult(res, True) + return self.__remove(filter, multi=False) def delete_many(self, filter, session=None): - res = self.__remove(filter, multi=True) - return DeleteResult(res, True) + return self.__remove(filter, multi=True) def list_indexes(self, session=None): return Cursor(self, lambda: self._indexes.values()) - def ensure_index(self, key_or_list, unique=False, cache_for=300, + def create_index(self, key_or_list, unique=False, cache_for=300, name=None, **kwargs): - if isinstance(key_or_list, list): + if isinstance(key_or_list, (list, collections.abc.ItemsView)): keys = tuple(tuple(k) for k in key_or_list) else: keys = ([key_or_list, ASCENDING],) @@ -591,10 +600,6 @@ def ensure_index(self, key_or_list, unique=False, cache_for=300, return index_name - # ensure_index is now deprecated. 
- def create_index(self, keys, **kwargs): - return self.ensure_index(keys, **kwargs) - def index_information(self): return { index_name: fields @@ -643,27 +648,16 @@ def _deindex(self, doc): key_values = self._extract_index_key(doc, keys) docindex.pop(key_values, None) - def map_reduce(self, map, reduce, out, full_response=False, **kwargs): - if isinstance(out, str): - out = { 'replace':out } - cmd_args = {'mapreduce': self.name, - 'map': map, - 'reduce': reduce, - 'out': out, - } - cmd_args.update(kwargs) - return self.database.command(cmd_args) - def distinct(self, key, filter=None, **kwargs): return self.database.command({'distinct': self.name, 'key': key, 'filter': filter}) def bulk_write(self, requests, ordered=True, - bypass_document_validation=False): + bypass_document_validation=False) -> None: for step in requests: if isinstance(step, UpdateOne): - self.update_one(step._filter, step._doc, upsert=step._upsert) + self.update_one(step._filter, step._doc, upsert=step._upsert) else: raise NotImplementedError( "MIM currently doesn't support %s operations" % type(step) @@ -697,7 +691,7 @@ def __init__(self, collection, _iterator_gen, if isinstance(projection, (tuple, list)): projection = {f: 1 for f in projection} - self._collection = collection + self.collection = collection self._iterator_gen = _iterator_gen self._sort = sort self._skip = skip or None # cope with 0 being passed. @@ -722,7 +716,7 @@ def iterator(self): def clone(self, **overrides): result = Cursor( - collection=self._collection, + collection=self.collection, _iterator_gen=self._iterator_gen, sort=self._sort, skip=self._skip, @@ -738,7 +732,10 @@ def rewind(self): del self.iterator self._safe_to_chain = True - def count(self): + def _count(self): + """ + An internal method to count the number of documents in the cursor. 
+ """ return sum(1 for x in self._iterator_gen()) def __getitem__(self, key): @@ -768,7 +765,7 @@ def next(self): # mim doesn't currently do anything with codec_options, so this doesn't do anything currently # but leaving it here as a placeholder for the future - otherwise we should delete wrap_as_class() - return wrap_as_class(value, self._collection.codec_options.document_class) + return wrap_as_class(value, self.collection.codec_options.document_class) __next__ = next @@ -815,13 +812,13 @@ def hint(self, index): # checks indexes, but doesn't actually use hinting if type(index) == list: test_idx = [(i, direction) for i, direction in index if i != '$natural'] - values = [[k for k in i["key"]] for i in self._collection._indexes.values()] + values = [[k for k in i["key"]] for i in self.collection._indexes.values()] if test_idx and test_idx not in values: raise OperationFailure('database error: bad hint. Valid values: %s' % values) elif isinstance(index, str): - if index not in self._collection._indexes.keys(): + if index not in self.collection._indexes.keys(): raise OperationFailure('database error: bad hint. 
Valid values: %s' - % self._collection._indexes.keys()) + % self.collection._indexes.keys()) elif index is None: pass else: @@ -898,6 +895,13 @@ def _build_index(cls): def _build_types(cls): # this is a list of conversion functions, and the types they apply to # see also bson._ENCODERS for what pymongo itself handles + + def handle_binary(val): + """Treat UUIDs as binary.""" + if isinstance(val, uuid.UUID): + val = Binary.from_uuid(val, uuid_representation=UUID_REPRESENTATION) + return val + cls._types = [ (lambda x:x, [type(None)]), (lambda x:x, [int]), @@ -905,14 +909,13 @@ def _build_types(cls): (lambda x: {k: cls.to_bson(v) for k, v in x.items()}, [dict, MatchDoc]), (lambda x:list(cls.to_bson(i) for i in x), [list, MatchList]), (lambda x:x, [tuple]), - (lambda x:x, [bson.Binary]), + (lambda x:handle_binary(x), [bson.Binary, uuid.UUID]), (lambda x:x, [bytes]), (lambda x:x, [bson.ObjectId]), (lambda x:x, [bool]), (lambda x:x, [datetime]), (lambda x:x, [bson.Regex]), (lambda x:x, [float]), - (lambda x:x, [uuid.UUID]), ] @@ -968,7 +971,7 @@ def match(self, key, op, value): # $options is currently only correlated to $regex and is not a standalone operator # always True to prevent code that use for example case insensitive regex from failing # tests without any reason - log.warn('$options not implemented') + log.warning('$options not implemented') return True if op == '$ne': return BsonArith.cmp(val, value) != 0 if op == '$gt': return BsonArith.cmp(val, value) > 0 @@ -1390,11 +1393,12 @@ def validate(doc): validate(v) def bson_safe(obj): - bson.BSON.encode(obj) + codec_options = CodecOptions(uuid_representation=UUID_REPRESENTATION) + return bson.BSON.encode(obj, codec_options=codec_options) def bcopy(obj): if isinstance(obj, dict): - return bson.BSON.encode(obj).decode() + return bson_safe(obj).decode() elif isinstance(obj, list): return list(map(bcopy, obj)) else: diff --git a/ming/odm/mapper.py b/ming/odm/mapper.py index f92ef4d..d6baaa9 100644 --- 
a/ming/odm/mapper.py +++ b/ming/odm/mapper.py @@ -1,12 +1,18 @@ +from __future__ import annotations + +import typing import warnings from copy import copy from ming.base import Object, NoDefault from ming.utils import wordwrap -from .base import ObjectState, state, _with_hooks +from .base import ObjectState, ObjectState, state, _with_hooks from .property import FieldProperty +if typing.TYPE_CHECKING: + # from ming.odm import ODMSession + from . import ODMSession, MappedClass def mapper(cls, collection=None, session=None, **kwargs): """Gets or creates the mapper for the given ``cls`` :class:`.MappedClass`""" @@ -75,14 +81,14 @@ def __repr__(self): self.mapped_class.__name__, self.collection.m.collection_name) @_with_hooks('insert') - def insert(self, obj, state, session, **kwargs): + def insert(self, obj: MappedClass, state: ObjectState, session: ODMSession, **kwargs): doc = self.collection(state.document, skip_from_bson=True) ret = session.impl.insert(doc, validate=False) state.status = state.clean return ret @_with_hooks('update') - def update(self, obj, state, session, **kwargs): + def update(self, obj: MappedClass, state: ObjectState, session: ODMSession, **kwargs): fields = state.options.get('fields', None) if fields is None: fields = () @@ -93,12 +99,12 @@ def update(self, obj, state, session, **kwargs): return ret @_with_hooks('delete') - def delete(self, obj, state, session, **kwargs): + def delete(self, obj: MappedClass, state: ObjectState, session: ODMSession, **kwargs): doc = self.collection(state.document, skip_from_bson=True) return session.impl.delete(doc) @_with_hooks('remove') - def remove(self, session, *args, **kwargs): + def remove(self, session: ODMSession, *args, **kwargs): return session.impl.remove(self.collection, *args, **kwargs) def create(self, doc, options, remake=True): @@ -176,7 +182,7 @@ def compile(self): for p in self.properties: p.compile(self) - def update_partial(self, session, *args, **kwargs): + def update_partial(self, session: 
ODMSession, *args, **kwargs): return session.impl.update_partial(self.collection, *args, **kwargs) def _from_doc(self, doc, options, validate=True): @@ -319,8 +325,9 @@ def __get__(self, instance, cls=None): class _ClassQuery: """Provides ``.query`` attribute for :class:`MappedClass`.""" _proxy_methods = ( - 'find', 'find_and_modify', 'remove', 'update', 'group', 'distinct', - 'aggregate', 'map_reduce', 'inline_map_reduce') + 'find', 'remove', 'update', 'distinct', + 'find_one_and_update', 'find_one_and_replace', 'find_one_and_delete', + 'aggregate',) def __init__(self, mapper): self.mapper = mapper diff --git a/ming/odm/mapper.pyi b/ming/odm/mapper.pyi index 0856d97..aab3092 100644 --- a/ming/odm/mapper.pyi +++ b/ming/odm/mapper.pyi @@ -14,14 +14,14 @@ class _ClassQuery(Generic[M]): def find_by(self, filter: MongoFilter = None, *args, **kwargs) -> Cursor[M]: ... def remove(self, spec_or_id: Union[MongoFilter, ObjectId] = None, **kwargs) -> ChangeResult: ... def count(self) -> int: ... - def find_and_modify(self, **kwargs) -> M: ... + def find_one_and_update(self, **kwargs) -> M: ... + def find_one_and_replace(self, **kwargs) -> M: ... + def find_one_and_delete(self, **kwargs) -> M: ... def update_partial(self, filter: MongoFilter, fields: dict, **kwargs) -> ChangeResult: ... """ def group(self) -> int: ... def distinct(self) -> int: ... def aggregate(self) -> int: ... - def map_reduce(self) -> int: ... - def inline_map_reduce(self) -> int: ... """ class _InstQuery(object): @@ -40,4 +40,4 @@ class Query(_ClassQuery[M], _InstQuery): def update(self, fields, **kwargs) -> ChangeResult: ... -def __getattr__(name) -> Any: ... # marks file as incomplete \ No newline at end of file +def __getattr__(name) -> Any: ... 
# marks file as incomplete diff --git a/ming/odm/odmsession.py b/ming/odm/odmsession.py index c1050da..2b4cb87 100644 --- a/ming/odm/odmsession.py +++ b/ming/odm/odmsession.py @@ -1,5 +1,7 @@ from collections import defaultdict +import warnings +from pymongo.collection import ReturnDocument from pymongo.database import Database from ming.session import Session @@ -188,10 +190,10 @@ def find(self, cls, *args, **kwargs): _call_hook(self, 'cursor_created', odm_cursor, 'find', cls, *args, **kwargs) return odm_cursor - def find_and_modify(self, cls, *args, **kwargs): + def __find_and_modify(self, cls, operation: str, *args, **kwargs): """Finds and updates ``cls``. - Arguments are the same as :meth:`pymongo.collection.Collection.find_and_modify`. + Arguments are the same as :meth:`pymongo.collection.Collection.find_one_and_update`. If the session has ``autoflush`` option, the session if flushed before performing the query. @@ -203,13 +205,23 @@ def find_and_modify(self, cls, *args, **kwargs): if self.autoflush: self.flush() m = mapper(cls) - obj = self.impl.find_and_modify(m.collection, *args, **kwargs) + fn = getattr(self.impl, operation) + obj = fn(m.collection, *args, **kwargs) if obj is None: return None cursor = ODMCursor(self, cls, iter([ obj ]), refresh=True, decorate=decorate) result = cursor.first() state(result).status = ObjectState.clean return result + def find_one_and_update(self, cls, *args, **kwargs): + return self.__find_and_modify(cls, 'find_one_and_update', *args, **kwargs) + + def find_one_and_replace(self, cls, *args, **kwargs): + return self.__find_and_modify(cls, 'find_one_and_replace', *args, **kwargs) + + def find_one_and_delete(self, cls, *args, **kwargs): + return self.__find_and_modify(cls, 'find_one_and_delete', *args, **kwargs) + @_with_hooks('remove') def remove(self, cls, *args, **kwargs): """Delete one or more ``cls`` entries from the collection. 
@@ -265,14 +277,6 @@ def drop_indexes(self, cls): """Drop all indexes declared in ``cls``""" return self.impl.drop_indexes(cls) - def group(self, cls, *args, **kwargs): - """Runs a grouping on the model collection. - - Arguments are the same as :meth:`pymongo.collection.Collection.group`. - """ - m = mapper(cls) - return self.impl.group(m.collection, *args, **kwargs) - def aggregate(self, cls, *args, **kwargs): """Runs an aggregation pipeline on the given collection. @@ -289,22 +293,6 @@ def distinct(self, cls, *args, **kwargs): m = mapper(cls) return self.impl.distinct(m.collection, *args, **kwargs) - def map_reduce(self, cls, *args, **kwargs): - """Runs a MapReduce job and stores results in a collection. - - Arguments are the same as :meth:`pymongo.collection.Collection.map_reduce`. - """ - m = mapper(cls) - return self.impl.map_reduce(m.collection, *args, **kwargs) - - def inline_map_reduce(self, cls, *args, **kwargs): - """Runs a MapReduce job and keeps results in-memory. - - Arguments are the same as :meth:`pymongo.collection.Collection.inline_map_reduce`. 
- """ - m = mapper(cls) - return self.impl.inline_map_reduce(m.collection, *args, **kwargs) - class SessionExtension: """Base class that should be inherited to handle Session events.""" diff --git a/ming/session.py b/ming/session.py index 6b47fae..3fa96ed 100644 --- a/ming/session.py +++ b/ming/session.py @@ -5,7 +5,8 @@ import pymongo import pymongo.errors -from pymongo.database import Database +import pymongo.collection +import pymongo.database from .base import Cursor, Object from .datastore import DataStore @@ -48,14 +49,14 @@ def by_name(cls, name): result = cls._registry[name] = cls(cls._datastores.get(name)) return result - def _impl(self, cls): + def _impl(self, cls) -> pymongo.collection.Collection: try: return self.db[cls.m.collection_name] except TypeError: raise exc.MongoGone('MongoDB is not connected') @property - def db(self) -> Database: + def db(self) -> pymongo.database.Database: if not self.bind: raise exc.MongoGone('No MongoDB connection for "%s"' % getattr(self, '_name', 'unknown connection')) return self.bind.db @@ -82,60 +83,61 @@ def find(self, cls, *args, **kwargs): collection = self._impl(cls) cursor = collection.find(*args, **kwargs) + find_spec = kwargs.get('filter', None) or args[0] if args else {} + if not validate: return (cls(o, skip_from_bson=True) for o in cursor) return Cursor(cls, cursor, allow_extra=allow_extra, - strip_extra=strip_extra) + strip_extra=strip_extra, + find_spec=find_spec) - def remove(self, cls, *args, **kwargs): + def remove(self, cls, filter={}, *args, **kwargs): fix_write_concern(kwargs) for kwarg in kwargs: if kwarg not in ('spec_or_id', 'w'): raise ValueError("Unexpected kwarg %s. Did you mean to pass a dict? If only sent kwargs, pymongo's remove()" " would've emptied the whole collection. Which we're pretty sure you don't want." 
% kwarg) - return self._impl(cls).remove(*args, **kwargs) + return self._impl(cls).delete_many(filter, *args, **kwargs) def find_by(self, cls, **kwargs): return self.find(cls, kwargs) def count(self, cls): - return self._impl(cls).count() + return self._impl(cls).estimated_document_count() - def ensure_index(self, cls, fields, **kwargs): + def create_index(self, cls, fields, **kwargs): index_fields = fixup_index(fields) - return self._impl(cls).ensure_index(index_fields, **kwargs), fields + return self._impl(cls).create_index(index_fields, **kwargs) + + def ensure_index(self, cls, fields, **kwargs): + return self.create_index(cls, fields, **kwargs) def ensure_indexes(self, cls): for idx in cls.m.indexes: - self.ensure_index(cls, idx.index_spec, background=True, **idx.index_options) - - def group(self, cls, *args, **kwargs): - return self._impl(cls).group(*args, **kwargs) + self.create_index(cls, idx.index_spec, background=True, **idx.index_options) def aggregate(self, cls, *args, **kwargs): return self._impl(cls).aggregate(*args, **kwargs) - def map_reduce(self, cls, *args, **kwargs): - return self._impl(cls).map_reduce(*args, **kwargs) - - def inline_map_reduce(self, cls, *args, **kwargs): - return self._impl(cls).inline_map_reduce(*args, **kwargs) - def distinct(self, cls, *args, **kwargs): return self._impl(cls).distinct(*args, **kwargs) def update_partial(self, cls, spec, fields, upsert=False, **kw): - return self._impl(cls).update(spec, fields, upsert, **kw) + multi = kw.pop('multi', False) + if multi is True: + return self._impl(cls).update_many(spec, fields, upsert, **kw) + return self._impl(cls).update_one(spec, fields, upsert, **kw) - def find_and_modify(self, cls, query=None, sort=None, new=False, **kw): - if query is None: query = {} - if sort is None: sort = {} - options = dict(kw, query=query, sort=sort, new=new) - bson = self._impl(cls).find_and_modify(**options) - if bson is None: return None - return cls.make(bson) + def find_one_and_update(self, 
cls, *args, **kwargs): + return self._impl(cls).find_one_and_update(*args, **kwargs) + + def find_one_and_replace(self, cls, *args, **kwargs): + return self._impl(cls).find_one_and_replace(*args, **kwargs) + + def find_one_and_delete(self, cls, *args, **kwargs): + return self._impl(cls).find_one_and_delete(*args, **kwargs) def _prep_save(self, doc, validate): hook = doc.m.before_save @@ -151,16 +153,48 @@ def _prep_save(self, doc, validate): return data @annotate_doc_failure - def save(self, doc, *args, **kwargs): + def save(self, doc, *args, **kwargs) -> bson.ObjectId: + """ + Can either + + args + N Y + |---------------------------| + _id N | insert | raise | + |---------------------------| + Y | replace | update | + |---------------------------| + """ data = self._prep_save(doc, kwargs.pop('validate', True)) + + # if _id is None: + # doc.pop('_id', None) + + new_id = None if args: - values = {arg: data[arg] for arg in args} - result = self._impl(doc).update( - dict(_id=doc._id), {'$set': values}, **fix_write_concern(kwargs)) + if '_id' in doc: + arg_data = {arg: data[arg] for arg in args} + result = self._impl(doc).update_one( + dict(_id=doc._id), {'$set': arg_data}, + **fix_write_concern(kwargs) + ) + else: + raise ValueError('Cannot save a subset without an _id') else: - result = self._impl(doc).save(data, **fix_write_concern(kwargs)) - if result and '_id' not in doc: - doc._id = result + if '_id' in doc: + result = self._impl(doc).replace_one( + dict(_id=doc._id), data, + upsert=True, **fix_write_concern(kwargs) + ) + new_id = result.upserted_id + else: + result = self._impl(doc).insert_one( + data, **fix_write_concern(kwargs) + ) + new_id = result.inserted_id + if result and ('_id' not in doc) and (new_id is not None): + doc._id = new_id + return result @annotate_doc_failure @@ -176,13 +210,13 @@ def upsert(self, doc, spec_fields, **kwargs): self._prep_save(doc, kwargs.pop('validate', True)) if type(spec_fields) != list: spec_fields = [spec_fields] - return 
self._impl(doc).update({k:doc[k] for k in spec_fields}, - doc, + return self._impl(doc).update_one({k:doc[k] for k in spec_fields}, + {'$set': doc}, upsert=True) @annotate_doc_failure def delete(self, doc): - return self._impl(doc).remove({'_id':doc._id}) + return self._impl(doc).delete_one({'_id':doc._id}) def _set(self, doc, key_parts, value): if len(key_parts) == 0: @@ -202,7 +236,7 @@ def set(self, doc, fields_values): for k,v in fields_values.items(): self._set(doc, k.split('.'), v) impl = self._impl(doc) - return impl.update({'_id':doc._id}, {'$set':fields_values}) + return impl.update_one({'_id':doc._id}, {'$set':fields_values}) @annotate_doc_failure def increase_field(self, doc, **kwargs): @@ -217,11 +251,11 @@ def increase_field(self, doc, **kwargs): raise ValueError(f"{key}={value}") if key not in doc: - self._impl(doc).update( + self._impl(doc).update_one( {'_id': doc._id, key: None}, {'$set': {key: value}} ) - self._impl(doc).update( + self._impl(doc).update_one( {'_id': doc._id, key: {'$lt': value}}, # failed attempt at doing it all in one operation #{'$where': "this._id == '%s' && (!(%s in this) || this.%s < '%s')" diff --git a/ming/tests/odm/test_mapper.py b/ming/tests/odm/test_mapper.py index 3399374..2d9f2ad 100644 --- a/ming/tests/odm/test_mapper.py +++ b/ming/tests/odm/test_mapper.py @@ -168,7 +168,7 @@ def test_mapped_object(self): def test_mapper(self): m = mapper(self.Basic) assert repr(m) == '' - self.datastore.db.basic.insert(dict( + self.datastore.db.basic.insert_one(dict( a=1, b=[2,3], c=dict(d=4, e=5), f='unknown')) obj = self.Basic.query.find().options(instrument=False).first() q = self.Basic.query.find() @@ -252,11 +252,6 @@ def test_aggregate(self, pymongo_aggregate): self.Basic.query.aggregate([]) assert pymongo_aggregate.called - @patch('ming.mim.Collection.map_reduce') - def test_map_reduce(self, mim_map_reduce): - self.Basic.query.map_reduce('...', '...', {}) - assert mim_map_reduce.called - @patch('ming.mim.Collection.distinct') 
def test_distinct(self, mim_distinct): self.Basic.query.distinct('field') @@ -267,16 +262,6 @@ def test_cursor_distinct(self, mim_distinct): self.Basic.query.find({'a': 'b'}).distinct('field') assert mim_distinct.called - @patch('pymongo.collection.Collection.inline_map_reduce') - def test_inline_map_reduce(self, pymongo_inline_map_reduce): - self.Basic.query.inline_map_reduce() - assert pymongo_inline_map_reduce.called - - @patch('pymongo.collection.Collection.group') - def test_group(self, pymongo_group): - self.Basic.query.group() - assert pymongo_group.called - def test_multiple_update_flushes(self): initial_doc = self.Basic() initial_doc.a = 1 diff --git a/ming/tests/test_declarative.py b/ming/tests/test_declarative.py index 9665418..7be426d 100644 --- a/ming/tests/test_declarative.py +++ b/ming/tests/test_declarative.py @@ -1,3 +1,4 @@ +import os from unittest import TestCase from collections import defaultdict @@ -6,9 +7,11 @@ from pymongo.errors import AutoReconnect from ming.base import Cursor +from ming.datastore import create_datastore from ming.declarative import Document from ming.metadata import Field, Index from ming import schema as S +from ming.odm.odmsession import ODMSession, ThreadLocalODMSession from ming.session import Session from ming.exc import MingException @@ -128,6 +131,45 @@ def test_migrate(self): self.MockSession.find.assert_called_with(self.TestDoc) self.MockSession.save.assert_called_with(doc) +class TestDocumentReal(TestCase): + DATASTORE = f"mongodb://localhost/test_ming_TestDocumentReal_{os.getpid()}?serverSelectionTimeoutMS=100" + + def setUp(self): + self.datastore = create_datastore(self.DATASTORE) + self.session = Session(bind=self.datastore) + + class TestDoc(Document): + class __mongometa__: + name='test_doc' + session = self.session + indexes = [ ('a',) ] + _id = Field(S.Anything) + a=Field(S.Int, if_missing=None) + b=Field(S.Object(dict(a=S.Int(if_missing=None)))) + class TestDocNoSchema(Document): + class __mongometa__: 
+ name='test_doc' + session = self.session + self.TestDoc = TestDoc + self.TestDocNoSchema = TestDocNoSchema + def tearDown(self): + self.TestDoc.m.remove() + self.TestDocNoSchema.m.remove() + + def test_field(self): + doc = self.TestDoc(dict(_id=1, a=1, b=dict(a=5))) + doc.m.save() + + self.assertEqual(doc.a, 1) + self.assertEqual(doc.b, dict(a=5)) + doc.a = 5 + self.assertEqual(doc, dict(_id=1, a=5, b=dict(a=5))) + del doc.a + self.assertEqual(doc, dict(_id=1, b=dict(a=5))) + self.assertRaises(AttributeError, getattr, doc, 'c') + self.assertRaises(AttributeError, getattr, doc, 'a') + self.assertEqual(self.session.count(self.TestDoc), 1) + class TestIndexes(TestCase): def setUp(self): @@ -160,11 +202,11 @@ class __mongometa__: self.MyDoc = MyDoc def test_ensure_indexes(self): - # make sure the manager constructor calls ensure_index with the right stuff + # make sure the manager constructor calls create_index with the right stuff self.MyDoc.m collection = self.MockSession.db[self.MyDoc.m.collection_name] - ensure_index = collection.ensure_index - args = ensure_index.call_args_list + create_index = collection.create_index + args = create_index.call_args_list for a in args: print(a) indexes = [ @@ -182,8 +224,8 @@ def test_ensure_indexes(self): def test_ensure_indexes_custom_options(self): self.MyDoc.m collection = self.MockSession.db[self.MyDoc.m.collection_name] - ensure_index = collection.ensure_index - args = ensure_index.call_args_list + create_index = collection.create_index + args = create_index.call_args_list custom_named_index = None for index in self.MyDoc.m.indexes: @@ -200,29 +242,29 @@ def test_ensure_indexes_custom_options(self): def test_ensure_indexes_slave(self): # on a slave, an error will be thrown, but it should be swallowed collection = self.MockSession.db[self.MyDoc.__mongometa__.name] - ensure_index = collection.ensure_index - ensure_index.side_effect = AutoReconnect('not master') + create_index = collection.create_index + 
create_index.side_effect = AutoReconnect('not master') self.MyDoc.m - assert ensure_index.called + assert create_index.called # don't keep trying after it failed once self.MyDoc.m - assert ensure_index.call_count == 1, ensure_index.call_args_list + assert create_index.call_count == 1, create_index.call_args_list def test_auto_ensure_indexes_option(self): - ensure_index = self.MockSession.db[self.MyDoc.__mongometa__.name].ensure_index + create_index = self.MockSession.db[self.MyDoc.__mongometa__.name].create_index self.MockSession.bind.bind._auto_ensure_indexes = False self.MyDoc.m - assert not ensure_index.called + assert not create_index.called def test_ensure_indexes_other_error(self): # same as above, but no swallowing collection = self.MockSession.db[self.MyDoc.__mongometa__.name] - ensure_index = collection.ensure_index - ensure_index.side_effect = AutoReconnect('blah blah') + create_index = collection.create_index + create_index.side_effect = AutoReconnect('blah blah') self.assertRaises(AutoReconnect, lambda: self.MyDoc.m) - assert ensure_index.called + assert create_index.called def test_index_inheritance_child_none(self): class MyChild(self.MyDoc): @@ -330,7 +372,8 @@ class __mongometa__: b=Field(S.Object(dict(a=int))) self.TestDoc = TestDoc mongo_cursor = IteratorMock(iter([ {}, {}, {} ])) - mongo_cursor.count = mock.Mock(return_value=3) + mongo_cursor.collection = mock.Mock() + mongo_cursor.collection.count_documents = mock.Mock(return_value=3) mongo_cursor.limit = mock.Mock(return_value=mongo_cursor) mongo_cursor.hint = mock.Mock(return_value=mongo_cursor) mongo_cursor.skip = mock.Mock(return_value=mongo_cursor) diff --git a/ming/tests/test_functional.py b/ming/tests/test_functional.py index 56e8603..d651cab 100644 --- a/ming/tests/test_functional.py +++ b/ming/tests/test_functional.py @@ -100,14 +100,14 @@ def setUp(self): Index('test2'), Index('test1', 'test2', direction=pymongo.DESCENDING)) - @mock.patch('ming.session.Session.ensure_index') - def 
test_ensure_indexes(self, ensure_index): - # make sure the manager constructor calls ensure_index with the right + @mock.patch('ming.session.Session.create_index') + def test_ensure_indexes(self, create_index): + # make sure the manager constructor calls create_index with the right # stuff self.MyDoc.m collection = self.MockSession.db[self.MyDoc.m.collection_name] - ensure_index = collection.ensure_index - args = ensure_index.call_args_list + create_index = collection.create_index + args = create_index.call_args_list indexes = [ ( ([ ('test1', pymongo.DESCENDING), ('test2', pymongo.DESCENDING) ],), dict(unique=False, sparse=False, background=True) ), @@ -117,13 +117,13 @@ def test_ensure_indexes(self, ensure_index): self.assertTrue(i in args, args) - @mock.patch('ming.session.Session.ensure_index') - def test_ensure_indexes_slave(self, ensure_index): + @mock.patch('ming.session.Session.create_index') + def test_ensure_indexes_slave(self, create_index): # on a slave, an error will be thrown, but it should be swallowed self.MyDoc.m collection = self.MockSession.db[self.MyDoc.m.collection_name] - ensure_index = collection.ensure_index - assert ensure_index.called + create_index = collection.create_index + assert create_index.called def test_index_inheritance_child_none(self): MyChild = collection(self.MyDoc, collection_name='my_child') @@ -201,7 +201,8 @@ def next(self): Field('b', dict(a=int))) mongo_cursor = IteratorMock(iter([ {}, {}, {} ])) - mongo_cursor.count = mock.Mock(return_value=3) + mongo_cursor.collection = mock.Mock() + mongo_cursor.collection.count_documents = mock.Mock(return_value=3) mongo_cursor.limit = mock.Mock(return_value=mongo_cursor) mongo_cursor.hint = mock.Mock(return_value=mongo_cursor) mongo_cursor.skip = mock.Mock(return_value=mongo_cursor) diff --git a/ming/tests/test_gridfs.py b/ming/tests/test_gridfs.py index 39a581e..617db2b 100644 --- a/ming/tests/test_gridfs.py +++ b/ming/tests/test_gridfs.py @@ -98,6 +98,6 @@ def 
test_get_version(self): 'jumped over the lazy dog') def test_custom_index(self): - self.ds.db[f'{self.fs_coll}.files'].ensure_index('custom_fld') + self.ds.db[f'{self.fs_coll}.files'].create_index('custom_fld') with self.TestFS.m.new_file('test.txt') as fp: fp.write('The quick brown fox') diff --git a/ming/tests/test_mim.py b/ming/tests/test_mim.py index e43953d..c9df195 100644 --- a/ming/tests/test_mim.py +++ b/ming/tests/test_mim.py @@ -17,9 +17,9 @@ class TestDatastore(TestCase): def setUp(self): self.bind = create_datastore('mim:///testdb') self.bind.conn.drop_all() - self.bind.db.coll.insert({'_id':'foo', 'a':2, 'c':[1,2,3], 'z': {'egg': 'spam', 'spam': 'egg'}}) + self.bind.db.coll.insert_one({'_id':'foo', 'a':2, 'c':[1,2,3], 'z': {'egg': 'spam', 'spam': 'egg'}}) for r in range(4): - self.bind.db.rcoll.insert({'_id':'r%s' % r, 'd':r}) + self.bind.db.rcoll.insert_one({'_id':'r%s' % r, 'd':r}) def test_limit(self): f = self.bind.db.rcoll.find @@ -27,79 +27,79 @@ def test_limit(self): self.assertEqual(4, len(f({}).limit(0).all())) def test_regex(self): - f = self.bind.db.rcoll.find - assert 4 == f(dict(_id=re.compile(r'r\d+'))).count() - assert 2 == f(dict(_id=re.compile(r'r[0-1]'))).count() + f = self.bind.db.rcoll.count_documents + assert 4 == f(dict(_id=re.compile(r'r\d+'))) + assert 2 == f(dict(_id=re.compile(r'r[0-1]'))) def test_regex_options(self): - f = self.bind.db.rcoll.find - assert 2 == f(dict(_id={'$regex': 'r[0-1]', '$options': 'i'})).count() + f = self.bind.db.rcoll.count_documents + assert 2 == f(dict(_id={'$regex': 'r[0-1]', '$options': 'i'})) def test_eq(self): - f = self.bind.db.rcoll.find - assert 1 == f(dict(d={'$eq': 0})).count() + f = self.bind.db.rcoll.count_documents + assert 1 == f(dict(d={'$eq': 0})) def test_ne(self): - f = self.bind.db.rcoll.find - assert 3 == f(dict(d={'$ne': 0})).count() + f = self.bind.db.rcoll.count_documents + assert 3 == f(dict(d={'$ne': 0})) def test_gt(self): - f = self.bind.db.rcoll.find - assert 1 == 
f(dict(d={'$gt': 2})).count() - assert 0 == f(dict(d={'$gt': 3})).count() + f = self.bind.db.rcoll.count_documents + assert 1 == f(dict(d={'$gt': 2})) + assert 0 == f(dict(d={'$gt': 3})) def test_gte(self): - f = self.bind.db.rcoll.find - assert 2 == f(dict(d={'$gte': 2})).count() - assert 1 == f(dict(d={'$gte': 3})).count() + f = self.bind.db.rcoll.count_documents + assert 2 == f(dict(d={'$gte': 2})) + assert 1 == f(dict(d={'$gte': 3})) def test_lt(self): - f = self.bind.db.rcoll.find - assert 0 == f(dict(d={'$lt': 0})).count() - assert 1 == f(dict(d={'$lt': 1})).count() - assert 2 == f(dict(d={'$lt': 2})).count() + f = self.bind.db.rcoll.count_documents + assert 0 == f(dict(d={'$lt': 0})) + assert 1 == f(dict(d={'$lt': 1})) + assert 2 == f(dict(d={'$lt': 2})) def test_lte(self): - f = self.bind.db.rcoll.find - assert 1 == f(dict(d={'$lte': 0})).count() - assert 2 == f(dict(d={'$lte': 1})).count() - assert 3 == f(dict(d={'$lte': 2})).count() + f = self.bind.db.rcoll.count_documents + assert 1 == f(dict(d={'$lte': 0})) + assert 2 == f(dict(d={'$lte': 1})) + assert 3 == f(dict(d={'$lte': 2})) def test_range_equal(self): - f = self.bind.db.rcoll.find - assert 1 == f(dict(d={'$gte': 2, '$lte': 2})).count() - assert 2 == f(dict(d={'$gte': 1, '$lte': 2})).count() - assert 0 == f(dict(d={'$gte': 4, '$lte': -1})).count() + f = self.bind.db.rcoll.count_documents + assert 1 == f(dict(d={'$gte': 2, '$lte': 2})) + assert 2 == f(dict(d={'$gte': 1, '$lte': 2})) + assert 0 == f(dict(d={'$gte': 4, '$lte': -1})) def test_range_inequal(self): - f = self.bind.db.rcoll.find - assert 0 == f(dict(d={'$gt': 2, '$lt': 2})).count() - assert 1 == f(dict(d={'$gt': 2, '$lt': 4})).count() - assert 0 == f(dict(d={'$gt': 1, '$lt': 2})).count() - assert 1 == f(dict(d={'$gt': 1, '$lt': 3})).count() - assert 0 == f(dict(d={'$gt': 4, '$lt': -1})).count() + f = self.bind.db.rcoll.count_documents + assert 0 == f(dict(d={'$gt': 2, '$lt': 2})) + assert 1 == f(dict(d={'$gt': 2, '$lt': 4})) + assert 0 == 
f(dict(d={'$gt': 1, '$lt': 2})) + assert 1 == f(dict(d={'$gt': 1, '$lt': 3})) + assert 0 == f(dict(d={'$gt': 4, '$lt': -1})) def test_exists(self): - f = self.bind.db.coll.find - assert 1 == f(dict(a={'$exists':True})).count() - assert 0 == f(dict(a={'$exists':False})).count() - assert 0 == f(dict(b={'$exists':True})).count() - assert 1 == f(dict(b={'$exists':False})).count() + f = self.bind.db.coll.count_documents + assert 1 == f(dict(a={'$exists':True})) + assert 0 == f(dict(a={'$exists':False})) + assert 0 == f(dict(b={'$exists':True})) + assert 1 == f(dict(b={'$exists':False})) def test_all(self): - f = self.bind.db.coll.find - assert 1 == f(dict(c={'$all':[1,2]})).count() - assert 1 == f(dict(c={'$all':[1,2,3]})).count() - assert 0 == f(dict(c={'$all':[2,3,4]})).count() - assert 1 == f(dict(c={'$all':[]})).count() + f = self.bind.db.coll.count_documents + assert 1 == f(dict(c={'$all':[1,2]})) + assert 1 == f(dict(c={'$all':[1,2,3]})) + assert 0 == f(dict(c={'$all':[2,3,4]})) + assert 1 == f(dict(c={'$all':[]})) def test_or(self): - f = self.bind.db.coll.find - assert 1 == f(dict({'$or': [{'c':{'$all':[1,2,3]}}]})).count() - assert 0 == f(dict({'$or': [{'c':{'$all':[4,2,3]}}]})).count() - assert 1 == f(dict({'$or': [{'a': 2}, {'c':{'$all':[1,2,3]}}]})).count() - self.assertEqual(0, f(dict({'_id': 'bar', '$or': [{'a': 2}, {'c':{'$all':[1,2,3]}}]})).count()) - self.assertEqual(1, f(dict({'_id': 'foo', '$or': [{'a': 2}, {'c':{'$all':[1,2,3]}}]})).count()) + f = self.bind.db.coll.count_documents + assert 1 == f(dict({'$or': [{'c':{'$all':[1,2,3]}}]})) + assert 0 == f(dict({'$or': [{'c':{'$all':[4,2,3]}}]})) + assert 1 == f(dict({'$or': [{'a': 2}, {'c':{'$all':[1,2,3]}}]})) + self.assertEqual(0, f(dict({'_id': 'bar', '$or': [{'a': 2}, {'c':{'$all':[1,2,3]}}]}))) + self.assertEqual(1, f(dict({'_id': 'foo', '$or': [{'a': 2}, {'c':{'$all':[1,2,3]}}]}))) def test_find_with_projection_list(self): o = self.bind.db.coll.find_one({'a': 2}, projection=['a']) @@ -159,7 +159,7 
@@ def test_find_with_invalid_kwargs(self): def test_rewind(self): collection = self.bind.db.coll - collection.insert({'a':'b'}, safe=True) + collection.insert_one({'a':'b'}) cursor = collection.find() doc = cursor[0] @@ -169,14 +169,14 @@ def test_rewind(self): def test_close(self): collection = self.bind.db.coll - collection.insert({'a': 'b'}) + collection.insert_one({'a': 'b'}) cursor = collection.find() cursor.close() self.assertRaises(StopIteration, cursor.next) def test_cursor_context_manager(self): collection = self.bind.db.coll - collection.insert({'a': 'b'}) + collection.insert_one({'a': 'b'}) with collection.find() as cursor: pass self.assertRaises(StopIteration, cursor.next) @@ -185,10 +185,9 @@ def test_search(self): conn = mim.Connection().get() coll = conn.searchdatabase.coll coll.create_index([('field', 'text')]) - coll.insert({'field': 'text to be searched'}) - coll.insert({'field': 'text to be'}) - assert coll.find({'$text': {'$search': 'searched'}}, - {'score': {'$meta': 'textScore'}}).count() == 1 + coll.insert_one({'field': 'text to be searched'}) + coll.insert_one({'field': 'text to be'}) + assert coll.count_documents({'$text': {'$search': 'searched'}}) == 1 class TestDottedOperators(TestCase): @@ -196,7 +195,7 @@ class TestDottedOperators(TestCase): def setUp(self): self.bind = create_datastore('mim:///testdb') self.bind.conn.drop_all() - self.bind.db.coll.insert( + self.bind.db.coll.insert_one( {'_id':'foo', 'a':2, 'b': { 'c': 1, 'd': 2, 'e': [1,2,3], 'f': [ { 'g': 1 }, { 'g': 2 } ] }, @@ -204,65 +203,65 @@ def setUp(self): self.coll = self.bind.db.coll def test_inc_dotted_dollar(self): - self.coll.update({'b.e': 2}, { '$inc': { 'b.e.$': 1 } }) + self.coll.update_many({'b.e': 2}, { '$inc': { 'b.e.$': 1 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.e': 1 }) self.assertEqual(obj, { 'b': { 'e': [ 1,3,3 ] } }) def test_inc_dotted_dollar_middle1(self): # match on g=1 and $inc by 10 - self.coll.update({'b.f.g': 1}, { '$inc': { 'b.f.$.g': 10 } }) 
+ self.coll.update_many({'b.f.g': 1}, { '$inc': { 'b.f.$.g': 10 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 }) self.assertEqual(obj, { 'b': { 'f': [ { 'g': 11 }, { 'g': 2 } ] }}) def test_find_dotted(self): - self.assertEqual(self.coll.find({'b.c': 1}).count(), 1) - self.assertEqual(self.coll.find({'b.c': 2}).count(), 0) + self.assertEqual(self.coll.count_documents({'b.c': 1}), 1) + self.assertEqual(self.coll.count_documents({'b.c': 2}), 0) self.assertEqual(0, len(self.coll.find({'x.y.z': 1}).all())) def test_inc_dotted(self): - self.coll.update({}, { '$inc': { 'b.c': 4 } }) + self.coll.update_many({}, { '$inc': { 'b.c': 4 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.c': 1 }) self.assertEqual(obj, { 'b': { 'c': 5 } }) def test_set_dotted(self): - self.coll.update({}, { '$set': { 'b.c': 4 } }) + self.coll.update_many({}, { '$set': { 'b.c': 4 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.c': 1 }) self.assertEqual(obj, { 'b': { 'c': 4 } }) def test_set_dotted_with_integer(self): - self.bind.db.coll.insert( + self.bind.db.coll.insert_one( {'_id':'foo2', 'a':2, 'b': [1,2,3], 'x': {} }) - self.coll.update({'_id': 'foo2'}, {'$set': {'b.0': 4}}) + self.coll.update_many({'_id': 'foo2'}, {'$set': {'b.0': 4}}) obj = self.coll.find_one({'_id': 'foo2'}) self.assertEqual(obj, {'a': 2, 'x': {}, '_id': 'foo2', 'b': [4, 2, 3]}) def test_unset_dotted(self): - self.coll.update({}, { '$unset': { 'b.f.1.g': 1 } }) + self.coll.update_many({}, { '$unset': { 'b.f.1.g': 1 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 }) self.assertEqual(obj, { 'b': { 'f': [{'g': 1}, {}] } }) # Check that it even works for keys that are not there. 
- self.coll.update({}, { '$unset': { 'b.this_does_not_exists': 1 } }) + self.coll.update_many({}, { '$unset': { 'b.this_does_not_exists': 1 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 }) self.assertEqual(obj, { 'b': { 'f': [{'g': 1}, {}] } }) # Check that unsetting subkeys of a nonexisting subdocument has no side effect - self.coll.update({}, {'$unset': {'this_does_not_exists.x.y.z': 1}}) + self.coll.update_many({}, {'$unset': {'this_does_not_exists.x.y.z': 1}}) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 }) self.assertEqual(obj, { 'b': { 'f': [{'g': 1}, {}] } }) def test_push_dotted(self): - self.coll.update({}, { '$push': { 'b.e': 4 } }) + self.coll.update_many({}, { '$push': { 'b.e': 4 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.e': 1 }) self.assertEqual(obj, { 'b': { 'e': [1,2,3,4] } }) def test_addToSet_dotted(self): - self.coll.update({}, { '$addToSet': { 'b.e': 4 } }) + self.coll.update_many({}, { '$addToSet': { 'b.e': 4 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.e': 1 }) self.assertEqual(obj, { 'b': { 'e': [1,2,3,4] } }) - self.coll.update({}, { '$addToSet': { 'b.e': 4 } }) + self.coll.update_many({}, { '$addToSet': { 'b.e': 4 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.e': 1 }) self.assertEqual(obj, { 'b': { 'e': [1,2,3,4] } }) @@ -285,21 +284,21 @@ def test_lt_dotted(self): self.assertNotEqual(obj, None) def test_pull_dotted(self): - self.coll.update( + self.coll.update_many( {}, { '$pull': { 'b.f': { 'g': { '$gt': 1 } } } }) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 } ) self.assertEqual(obj, { 'b': { 'f': [ {'g': 1 } ] } } ) def test_pull_all_dotted(self): - self.coll.update( + self.coll.update_many( {}, { '$pullAll': { 'b.f': [{'g': 1 }] } }) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 } ) self.assertEqual(obj, { 'b': { 'f': [ {'g': 2 } ] } } ) def test_pop_dotted(self): - self.coll.update( + self.coll.update_many( {}, { '$pop': { 'b.f': 1 } }) obj = self.coll.find_one({}, { '_id': 0, 'b.f': 1 } ) @@ -321,7 
+320,7 @@ def setUp(self): self.bind = create_datastore('mim:///testdb') self.bind.conn.drop_all() self.doc = {'_id':'foo', 'a':2, 'c':[1,2,3]} - self.bind.db.coll.insert(self.doc) + self.bind.db.coll.insert_one(self.doc) def test_filemd5(self): self.assertEqual( @@ -348,210 +347,6 @@ def test_findandmodify_new(self): newdoc = self.bind.db.coll.find().next() self.assertEqual(newdoc['a'], 3, newdoc) - -class TestMRCommands(TestCommands): - - def setUp(self): - super().setUp() - if not self.bind.db._jsruntime: - self.skipTest("Javascript Runtime Unavailable") - - def test_mr_inline(self): - result = self.bind.db.command( - 'mapreduce', 'coll', - map='function(){ emit(1, this.a); }', - reduce=self.sum_js, - out=dict(inline=1)) - self.assertEqual(result['results'], [ dict(_id=1, value=2) ]) - - def test_mr_inline_date_key(self): - dt = datetime.utcnow() - dt = dt.replace(microsecond=123000) - self.bind.db.date_coll.insert({'a': dt }) - result = self.bind.db.command( - 'mapreduce', 'date_coll', - map='function(){ emit(1, this.a); }', - reduce=self.first_js, - out=dict(inline=1)) - self.assertEqual(result['results'][0]['value'], dt) - - def test_mr_inline_date_value(self): - result = self.bind.db.command( - 'mapreduce', 'coll', - map='function(){ emit(1, new Date()); }', - reduce=self.first_js, - out=dict(inline=1)) - self.assertEqual(result['results'][0]['_id'], 1) - self.assertTrue(isinstance(result['results'][0]['value'], datetime)) - - # MAP_TIMESTAMP and REDUCE_MIN_MAX are based on the recipe - # http://cookbook.mongodb.org/patterns/finding_max_and_min_values_for_a_key - MAP_TIMESTAMP = bson.code.Code(""" - function () { - emit('timestamp', { min : this.timestamp, - max : this.timestamp } ) - } - """) - - REDUCE_MIN_MAX = bson.code.Code(""" - function (key, values) { - var res = values[0]; - for ( var i=1; i res.max ) - res.max = values[i].max; - } - return res; - } - """) - - def test_mr_inline_multi_date_response(self): - # Calculate the min and max timestamp with 
one mapreduce call, - # and return a mapping containing both values. - self.bind.db.coll.remove() - docs = [{'timestamp': datetime(2013, 1, 1, 14, 0)}, - {'timestamp': datetime(2013, 1, 9, 14, 0)}, - {'timestamp': datetime(2013, 1, 19, 14, 0)}, - ] - for d in docs: - self.bind.db.date_coll.insert(d) - result = self.bind.db.date_coll.map_reduce( - map=self.MAP_TIMESTAMP, - reduce=self.REDUCE_MIN_MAX, - out={'inline': 1}) - expected = [{'value': {'min': docs[0]['timestamp'], - 'max': docs[-1]['timestamp']}, - '_id': 'timestamp'}] - print('RESULTS:', result['results']) - print('EXPECTED:', expected) - self.assertEqual(result['results'], expected) - - def test_mr_inline_collection(self): - result = self.bind.db.coll.map_reduce( - map='function(){ emit(1, this.a); }', - reduce=self.sum_js, - out=dict(inline=1)) - self.assertEqual(result['results'], [ dict(_id=1, value=2) ]) - - def test_mr_finalize(self): - result = self.bind.db.coll.map_reduce( - map='function(){ emit(1, this.a); }', - reduce=self.sum_js, - out=dict(inline=1), - finalize='function(k, v){ return v + 42; }') - self.assertEqual(result['results'], [ dict(_id=1, value=44) ]) - - def test_mr_merge(self): - result = self.bind.db.command( - 'mapreduce', 'coll', - map='function(){ emit(1, this.a+1); }', - reduce=self.sum_js, - out=dict(merge='coll')) - self.assertEqual(result['result'], 'coll') - self.assertEqual( - sorted(list(self.bind.db.coll.find())), - sorted([ self.doc, dict(_id=1, value=3) ])) - - def test_mr_merge_collection(self): - result = self.bind.db.coll.map_reduce( - map='function(){ emit(1, this.a+1); }', - reduce=self.sum_js, - out=dict(merge='coll')) - self.assertEqual(result['result'], 'coll') - self.assertEqual( - sorted(list(self.bind.db.coll.find())), - sorted([ self.doc, dict(_id=1, value=3) ])) - - def test_mr_replace(self): - result = self.bind.db.command( - 'mapreduce', 'coll', - map='function(){ emit(1, this.a+1); }', - reduce=self.sum_js, - out=dict(replace='coll')) - 
self.assertEqual(result['result'], 'coll') - self.assertEqual( - list(self.bind.db.coll.find()), - [ dict(_id=1, value=3) ]) - - def test_mr_replace_collection(self): - result = self.bind.db.coll.map_reduce( - map='function(){ emit(1, this.a+1); }', - reduce=self.sum_js, - out=dict(replace='coll')) - self.assertEqual(result['result'], 'coll') - self.assertEqual( - list(self.bind.db.coll.find()), - [ dict(_id=1, value=3) ]) - - def test_mr_reduce(self): - self.bind.db.reduce.insert(dict( - _id=1, value=42)) - result = self.bind.db.command( - 'mapreduce', 'coll', - map='function(){ emit(1, this.a+1); }', - reduce=self.sum_js, - out=dict(reduce='reduce')) - self.assertEqual(result['result'], 'reduce') - self.assertEqual( - list(self.bind.db.reduce.find()), - [ dict(_id=1, value=45) ]) - - def test_mr_reduce_list(self): - self.bind.db.reduce.insert(dict( - _id=1, value=[42])) - result = self.bind.db.command( - 'mapreduce', 'coll', - map='function(){ emit(1, [1]); }', - reduce=self.concat_js, - out=dict(reduce='reduce')) - self.assertEqual(result['result'], 'reduce') - self.assertEqual( - list(self.bind.db.reduce.find()), - [ dict(_id=1, value=[1, 42]) ]) - - def test_mr_reduce_collection(self): - self.bind.db.reduce.insert(dict( - _id=1, value=42)) - result = self.bind.db.coll.map_reduce( - map='function(){ emit(1, this.a+1); }', - reduce=self.sum_js, - out=dict(reduce='reduce')) - self.assertEqual(result['result'], 'reduce') - self.assertEqual( - list(self.bind.db.reduce.find()), - [ dict(_id=1, value=45) ]) - - def test_mr_replace_number_key_obj(self): - # testing numerical keys nested in objects being reduced - self.bind.db.coll.remove() - docs = [ {'val': {'id': 1, 'c': 5}} ] - for d in docs: - self.bind.db.date_coll.insert(d) - result = self.bind.db.date_coll.map_reduce( - map='function(){ var d = {}; d[new String(this.val.id)] = this.val.c; emit("val", d); }', - reduce=self.first_js, - out=dict(replace='coll')) - self.assertEqual(result['result'], 'coll') - 
expected = [{'_id': 'val', 'value': {'1': 5}}] - self.assertEqual( - list(self.bind.db.coll.find()), - expected) - - def test_mr_inline_number_key_obj(self): - # testing numerical keys nested in objects being reduced - self.bind.db.coll.remove() - docs = [ {'val': {'id': 1, 'c': 5}} ] - for d in docs: - self.bind.db.date_coll.insert(d) - result = self.bind.db.date_coll.map_reduce( - map='function(){ var d = {}; d[new String(this.val.id)] = this.val.c; emit("val", d); }', - reduce=self.first_js, - out=dict(inline=1)) - expected = [{'_id': 'val', 'value': {'1': 5}}] - self.assertEqual(result['results'], expected) - - class TestCollection(TestCase): def setUp(self): @@ -560,14 +355,14 @@ def setUp(self): def test_getitem_clones(self): test = self.bind.db.test - test.insert({'a':'b'}) + test.insert_one({'a':'b'}) cursor = test.find() doc = cursor[0] self.assertEqual(cursor.next(), doc) def test_upsert_simple(self): test = self.bind.db.test - test.update( + test.update_many( dict(_id=0, a=5), {'$set': dict(b=6) }, upsert=True) @@ -576,21 +371,21 @@ def test_upsert_simple(self): def test_upsert_duplicated(self): test = self.bind.db.test - test.ensure_index([('a', 1)], unique=True) + test.create_index([('a', 1)], unique=True) # Try with any index - test.update({'x': 'NOT_FOUND1'}, {'$set': {'a': 0}}, upsert=True) + test.update_many({'x': 'NOT_FOUND1'}, {'$set': {'a': 0}}, upsert=True) try: - test.update({'x': 'NOT_FOUND2'}, {'$set': {'a': 0}}, upsert=True) + test.update_many({'x': 'NOT_FOUND2'}, {'$set': {'a': 0}}, upsert=True) except DuplicateKeyError: pass else: assert False, 'Had to detect duplicate key' # Now try with _id - test.update({'x': 'NOT_FOUND3'}, {'$set': {'_id': 0}}, upsert=True) + test.update_many({'x': 'NOT_FOUND3'}, {'$set': {'_id': 0}}, upsert=True) try: - test.update({'x': 'NOT_FOUND4'}, {'$set': {'_id': 0}}, upsert=True) + test.update_many({'x': 'NOT_FOUND4'}, {'$set': {'_id': 0}}, upsert=True) except DuplicateKeyError: pass else: @@ -598,7 +393,7 @@ 
def test_upsert_duplicated(self): def test_upsert_setOnInsert(self): test = self.bind.db.test - test.update( + test.update_many( dict(_id=0, a=5), {'$set': dict(b=6), '$setOnInsert': dict(c=7)}, @@ -606,8 +401,8 @@ def test_upsert_setOnInsert(self): doc = test.find_one() self.assertEqual(doc, dict(_id=0, a=5, b=6, c=7)) - test.update(dict(_id=0, a=5), {'$set': dict(b=0, c=0)}) - test.update( + test.update_many(dict(_id=0, a=5), {'$set': dict(b=0, c=0)}) + test.update_many( dict(_id=0, a=5), {'$set': dict(b=2), '$setOnInsert': dict(c=7)}, @@ -617,7 +412,7 @@ def test_upsert_setOnInsert(self): def test_upsert_inc(self): test = self.bind.db.test - test.update( + test.update_many( dict(_id=0, a=5), {'$inc': dict(a=2, b=3) }, upsert=True) @@ -626,7 +421,7 @@ def test_upsert_inc(self): def test_upsert_push(self): test = self.bind.db.test - test.update( + test.update_many( dict(_id=0, a=5), {'$push': dict(c=1) }, upsert=True) @@ -634,29 +429,29 @@ def test_upsert_push(self): self.assertEqual(doc, dict(_id=0, a=5, c=[1])) def test_update_addToSet_with_each(self): - self.bind.db.coll.insert({'_id': 0, 'a': [1, 2, 3]}) - self.bind.db.coll.update({}, + self.bind.db.coll.insert_one({'_id': 0, 'a': [1, 2, 3]}) + self.bind.db.coll.update_many({}, {'$addToSet': {'a': {'$each': [0, 2, 4]}}}) doc = self.bind.db.coll.find_one() self.assertEqual(len(doc['a']), 5) def test_find_with_skip(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':i}) + self.bind.db.coll.insert_one({'_id':str(i), 'a':i}) result = self.bind.db.coll.find({}, skip=2) result = list(result) self.assertEqual(len(result), 3) def test_find_with_limit(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':i}) + self.bind.db.coll.insert_one({'_id':str(i), 'a':i}) result = self.bind.db.coll.find({}, limit=2) result = list(result) self.assertEqual(len(result), 2) def test_find_with_slice_skip(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':i}) + 
self.bind.db.coll.insert_one({'_id':str(i), 'a':i}) result = self.bind.db.coll.find().sort('a')[3:] result = list(result) self.assertEqual(len(result), 2) @@ -664,7 +459,7 @@ def test_find_with_slice_skip(self): def test_find_with_slice_limit(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':i}) + self.bind.db.coll.insert_one({'_id':str(i), 'a':i}) result = self.bind.db.coll.find().sort('a')[:2] result = list(result) self.assertEqual(len(result), 2) @@ -672,7 +467,7 @@ def test_find_with_slice_limit(self): def test_find_with_slice_skip_limit(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':i}) + self.bind.db.coll.insert_one({'_id':str(i), 'a':i}) result = self.bind.db.coll.find().sort('a')[2:4] result = list(result) self.assertEqual(len(result), 2) @@ -687,7 +482,7 @@ def test_find_with_slice_invalid(self): def test_find_with_paging(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':i}) + self.bind.db.coll.insert_one({'_id':str(i), 'a':i}) result_all = self.bind.db.coll.find() result_all = list(result_all) result_page1 = self.bind.db.coll.find({}, skip=0, limit=3) @@ -697,19 +492,19 @@ def test_find_with_paging(self): def test_distinct(self): for i in range(5): - self.bind.db.coll.insert({'_id':str(i), 'a':'A'}) + self.bind.db.coll.insert_one({'_id':str(i), 'a':'A'}) result = self.bind.db.coll.distinct('a') self.assertEqual(result, ['A']) def test_distinct_subkey(self): for i in range(5): - self.bind.db.coll.insert({'_id': str(i), 'a': {'b': 'A'}}) + self.bind.db.coll.insert_one({'_id': str(i), 'a': {'b': 'A'}}) result = self.bind.db.coll.distinct('a.b') self.assertEqual(result, ['A']) def test_distinct_sublist(self): for i in range(5): - self.bind.db.coll.insert({'_id': str(i), + self.bind.db.coll.insert_one({'_id': str(i), 'a': [{'b': 'A', 'z': 'z', 'f': {'f': 'F'}}, {'b': 'C', 'z': 'z', 'f': {'f': 'G'}}]}) result = self.bind.db.coll.distinct('a.b') @@ -721,51 +516,90 @@ def 
test_distinct_sublist(self): def test_distinct_filtered(self): for i in range(5): - self.bind.db.coll.insert({'_id': i, 'a': 'A'}) + self.bind.db.coll.insert_one({'_id': i, 'a': 'A'}) result = self.bind.db.coll.distinct('_id', filter={'_id': {'$lte': 2}}) self.assertEqual(set(result), {0, 1, 2}) - def test_find_and_modify_returns_none_on_no_entries(self): - self.assertEqual(None, self.bind.db.foo.find_and_modify({'i': 1}, {'$set': {'i': 2}})) - - def test_find_and_modify_returns_none_on_upsert_and_no_new(self): - self.assertEqual(None, self.bind.db.foo.find_and_modify({'i': 1}, - {'$set': {'i': 2}}, - upsert=True, new=False)) - - def test_find_and_modify_returns_old_value_on_no_new(self): - self.bind.db.foo.insert({'_id': 1, 'i': 1}) - self.assertEqual({'_id': 1, 'i': 1}, self.bind.db.foo.find_and_modify({'i': 1}, - {'$set': {'i': 2}}, - new=False)) - - def test_find_and_modify_returns_new_value_on_new(self): - self.bind.db.foo.insert({'_id': 1, 'i': 1}) - self.assertEqual({'_id': 1, 'i': 2}, self.bind.db.foo.find_and_modify({'i': 1}, - {'$set': {'i': 2}}, - new=True)) - - def test_find_and_modify_returns_new_value_on_new_filter_id(self): - self.bind.db.foo.insert({'i': 1}) - self.assertEqual({'i': 2}, self.bind.db.foo.find_and_modify({'i': 1}, - {'$set': {'i': 2}}, - fields={'_id': False, 'i': True}, - new=True)) - - def test_find_and_modify_returns_new_value_on_new_upsert(self): - self.assertEqual({'_id': 1, 'i': 2}, self.bind.db.foo.find_and_modify({'i': 1}, - {'$set': {'_id': 1, - 'i': 2}}, - new=True, - upsert=True)) + def test_find_one_and_update_returns_none_on_no_entries(self): + self.assertEqual(None, self.bind.db.foo.find_one_and_update({'i': 1}, {'$set': {'i': 2}})) - def test_find_and_modify_with_remove(self): - self.bind.db.col.insert({'_id': 1}) - self.assertEqual({'_id': 1}, self.bind.db.col.find_and_modify({'_id': 1}, remove=True)) - self.assertEqual(0, self.bind.db.col.count()) + def test_find_one_and_update_returns_none_on_upsert_and_no_new(self): 
+ self.assertEqual(None, self.bind.db.foo.find_one_and_update({'i': 1}, + {'$set': {'i': 2}}, + upsert=True, return_document=False)) + + def test_find_one_and_replace_returns_none_on_upsert_and_no_new(self): + self.assertEqual(None, self.bind.db.foo.find_one_and_replace({'i': 1}, + {'i': 2}, + upsert=True, return_document=False)) + + def test_find_one_and_update_returns_old_value_on_no_return_document(self): + self.bind.db.foo.insert_one({'_id': 1, 'i': 1}) + self.assertEqual({'_id': 1, 'i': 1}, self.bind.db.foo.find_one_and_update({'i': 1}, + {'$set': {'i': 2}}, + return_document=False)) + + def test_one_and_update_returns_new_value_on_new(self): + self.bind.db.foo.insert_one({'_id': 1, 'i': 1}) + self.assertEqual({'_id': 1, 'i': 2}, self.bind.db.foo.find_one_and_update({'i': 1}, + {'$set': {'i': 2}}, + return_document=True)) + + def test_find_one_and_replace_returns_new_value_on_new(self): + self.bind.db.foo.insert_one({'_id': 1, 'i': 1}) + self.assertEqual({'_id': 1, 'i': 2}, self.bind.db.foo.find_one_and_replace({'i': 1}, + {'i': 2}, + return_document=True)) + + def test_find_one_and_replace_ignores_id(self): + self.bind.db.foo.insert_one({'_id': 1, 'i': 1}) + self.assertEqual({'_id': 1, 'i': 2}, self.bind.db.foo.find_one_and_replace({'i': 1}, + {'i': 2}, + return_document=True)) + + def test_find_one_and_replace_fails_with_set(self): + self.bind.db.foo.insert_one({'_id': 1, 'i': 1}) + with self.assertRaises(ValueError): + self.bind.db.foo.find_one_and_replace({'i': 1}, + {'$set': {'i': 2}}, + return_document=True) + + def test_find_one_and_update_returns_new_value_on_new_filter_id(self): + self.bind.db.foo.insert_one({'i': 1}) + self.assertEqual({'i': 2}, self.bind.db.foo.find_one_and_update({'i': 1}, + {'$set': {'i': 2}}, + projection={'_id': False, 'i': True}, + return_document=True)) + + def test_find_one_and_update_returns_new_value_on_new_upsert(self): + self.assertEqual({'_id': 1, 'i': 2}, self.bind.db.foo.find_one_and_update({'i': 1}, + {'$set': {'_id': 
1, 'i': 2}}, + return_document=True, + upsert=True)) + + def test_find_one_and_update_fails_with_id(self): + self.bind.db.foo.insert_one({'_id': 1, 'i': 1}) + with self.assertRaises(ValueError): + self.bind.db.foo.find_one_and_update({'i': 1}, + {'_id': 2, 'i': 2}, + return_document=True) + + def test_find_one_and_replace_returns_new_value_on_new_upsert(self): + doc = self.bind.db.foo.find_one_and_replace({'i': 1}, + {'i': 2}, + return_document=True, + upsert=True) + self.assertIsInstance(doc.pop("_id"), bson.ObjectId) + self.assertEqual({'i': 2}, doc) + + def test_find_one_and_delete_returns_projection(self): + self.bind.db.col.insert_one({'_id': 1, 'i': 1}) + self.assertEqual({'i': 1}, self.bind.db.col.find_one_and_delete({'_id': 1}, + projection={'_id': False, 'i': True})) + self.assertEqual(0, self.bind.db.col.estimated_document_count()) def test_hint_simple(self): - self.bind.db.coll.ensure_index([('myindex', 1)]) + self.bind.db.coll.create_index([('myindex', 1)]) cursor = self.bind.db.coll.find().hint([('$natural', 1)]) self.assertEqual(type(cursor), type(self.bind.db.coll.find())) @@ -782,7 +616,7 @@ def test_hint_invalid(self): self.assertRaises(TypeError, self.bind.db.coll.find().hint, 123) def test_index_information(self): - self.bind.db.coll.ensure_index([('myfield', 1)], + self.bind.db.coll.create_index([('myfield', 1)], background=True, expireAfterSeconds=42, unique=True) @@ -792,120 +626,98 @@ def test_index_information(self): self.assertEqual(info['myfield']['expireAfterSeconds'], 42) self.assertEqual(info['myfield']['unique'], True) - def test_insert_manipulate_false(self): - doc = {'x': 1} - self.bind.db.coll.insert(doc, manipulate=False) - self.assertEqual(doc, {'x': 1}) - - def test_insert_manipulate_true(self): - doc = {'x': 1} - sample_id = bson.ObjectId() - # Cannot patch the class itself, otherwise isinstance() checks will fail on PyPy - with patch('bson.ObjectId.__init__', autospec=True, return_value=None, side_effect=lambda *args: 
args[0]._ObjectId__validate(sample_id)): - self.bind.db.coll.insert(doc, manipulate=True) - self.assertEqual(doc, {'x': 1, '_id': sample_id}) - - def test_save_id(self): - doc = {'_id': bson.ObjectId(), 'x': 1} - self.bind.db.coll.save(doc) - - def test_save_no_id(self): - doc = {'x': 1} - self.bind.db.coll.save(doc) - assert isinstance(doc['_id'], bson.ObjectId) - def test_unique_index_subdocument(self): coll = self.bind.db.coll - coll.ensure_index([('x.y', 1)], unique=True) - coll.insert({'x': {'y': 1}}) - coll.insert({'x': {'y': 2}}) - self.assertRaises(DuplicateKeyError, coll.insert, {'x': {'y': 2}}) + coll.create_index([('x.y', 1)], unique=True) + coll.insert_one({'x': {'y': 1}}) + coll.insert_one({'x': {'y': 2}}) + self.assertRaises(DuplicateKeyError, coll.insert_one, {'x': {'y': 2}}) def test_unique_index_whole_sdoc(self): coll = self.bind.db.coll - coll.ensure_index([('x', 1)], unique=True) - coll.insert({'x': {'y': 1}}) - coll.insert({'x': {'y': 2}}) - self.assertRaises(DuplicateKeyError, coll.insert, {'x': {'y': 2}}) + coll.create_index([('x', 1)], unique=True) + coll.insert_one({'x': {'y': 1}}) + coll.insert_one({'x': {'y': 2}}) + self.assertRaises(DuplicateKeyError, coll.insert_one, {'x': {'y': 2}}) def test_unique_sparse_index_subdocument(self): coll = self.bind.db.coll - coll.ensure_index([('x.y', 1)], unique=True, sparse=True) - coll.insert({'x': {'y': 1}}) + coll.create_index([('x.y', 1)], unique=True, sparse=True) + coll.insert_one({'x': {'y': 1}}) # no duplicate key error on these: - coll.insert({'x': {'y': None}}) - coll.insert({'x': {'y': None}}) - coll.insert({'x': {'other': 'field'}}) - coll.insert({'x': {'other': 'field'}}) + coll.insert_one({'x': {'y': None}}) + coll.insert_one({'x': {'y': None}}) + coll.insert_one({'x': {'other': 'field'}}) + coll.insert_one({'x': {'other': 'field'}}) # still errors on an existing duplication - self.assertRaises(DuplicateKeyError, coll.insert, {'x': {'y': 1}}) + self.assertRaises(DuplicateKeyError, 
coll.insert_one, {'x': {'y': 1}}) def test_unique_sparse_index_whole_sdoc(self): coll = self.bind.db.coll - coll.ensure_index([('x', 1)], unique=True, sparse=True) - coll.insert({'x': {'y': 1}}) + coll.create_index([('x', 1)], unique=True, sparse=True) + coll.insert_one({'x': {'y': 1}}) # no duplicate key error on these: - coll.insert({'x': None}) - coll.insert({'x': None}) - coll.insert({'other': 'field'}) - coll.insert({'other': 'field'}) + coll.insert_one({'x': None}) + coll.insert_one({'x': None}) + coll.insert_one({'other': 'field'}) + coll.insert_one({'other': 'field'}) # still errors on an existing duplication - self.assertRaises(DuplicateKeyError, coll.insert, {'x': {'y': 1}}) + self.assertRaises(DuplicateKeyError, coll.insert_one, {'x': {'y': 1}}) def test_delete_many(self): coll = self.bind.db.coll - coll.insert({'dme-m': 1}) - coll.insert({'dme-m': 1}) - coll.insert({'dme-m': 2}) + coll.insert_one({'dme-m': 1}) + coll.insert_one({'dme-m': 1}) + coll.insert_one({'dme-m': 2}) self.assertEqual(coll.delete_many({'dme-m': 1}).deleted_count, 2) def test_delete_one(self): coll = self.bind.db.coll - coll.insert({'dme-o': 1}) - coll.insert({'dme-o': 1}) - coll.insert({'dme-o': 2}) + coll.insert_one({'dme-o': 1}) + coll.insert_one({'dme-o': 1}) + coll.insert_one({'dme-o': 2}) self.assertEqual(coll.delete_one({'dme-o': 1}).deleted_count, 1) def test_find_one_and_delete(self): coll = self.bind.db.coll - coll.insert({'dme-o': 1}) - coll.insert({'dme-o': 1}) - coll.insert({'dme-o': 2}) + coll.insert_one({'_id': 1, 'dme-o': 1}) + coll.insert_one({'_id': 2, 'dme-o': 1}) + coll.insert_one({'_id': 3, 'dme-o': 2}) - coll.find_one_and_delete({'dme-o': 1}) + self.assertEqual({'_id': 1, 'dme-o': 1}, coll.find_one_and_delete({'dme-o': 1})) self.assertEqual(len(list(coll.find({'dme-o': {'$exists': True}}))), 2) def test_find_bytes(self): coll = self.bind.db.coll # bytes - coll.insert({'x': b'foo'}) + coll.insert_one({'x': b'foo'}) self.assertIsNotNone(coll.find_one({'x': 
b'foo'})) self.assertIsNotNone(coll.find_one({'x': bson.Binary(b'foo')})) # Binary, same as bytes - coll.insert({'x': bson.Binary(b'bar')}) + coll.insert_one({'x': bson.Binary(b'bar')}) self.assertIsNotNone(coll.find_one({'x': b'bar'})) self.assertIsNotNone(coll.find_one({'x': bson.Binary(b'bar')})) # Binary with different subtype, NOT like bytes - coll.insert({'x': bson.Binary(b'woah', bson.binary.USER_DEFINED_SUBTYPE)}) + coll.insert_one({'x': bson.Binary(b'woah', bson.binary.USER_DEFINED_SUBTYPE)}) self.assertIsNone(coll.find_one({'x': b'woah'})) self.assertIsNone(coll.find_one({'x': bson.Binary(b'woah')})) self.assertIsNotNone(coll.find_one({'x': bson.Binary(b'woah', bson.binary.USER_DEFINED_SUBTYPE)})) def test_find_RawBSONDocument(self): coll = self.bind.db.coll - coll.insert({'x': 5}) + coll.insert_one({'x': 5}) # real simple filter result = coll.find_one(RawBSONDocument(bson.encode({ 'x': 5 @@ -917,6 +729,14 @@ def test_find_RawBSONDocument(self): }))) self.assertIsNotNone(result) + def test_find_UUID(self): + coll = self.bind.db.coll + uu = uuid.UUID('{12345678-1234-5678-1234-567812345678}') + coll.insert_one({'x': uu}) + # real simple filter + result = coll.find_one({'x': uu}) + self.assertIsNotNone(result) + class TestBsonCompare(TestCase): @@ -1088,8 +908,8 @@ def setUp(self): def test_update_one(self): coll = self.bind.db.coll - coll.insert({'dme-o': 1}) - coll.insert({'dme-o': 1}) + coll.insert_one({'dme-o': 1}) + coll.insert_one({'dme-o': 1}) coll.bulk_write([ UpdateOne({'dme-o': 1}, {'$set': {'dme-o': 2}}) @@ -1104,10 +924,10 @@ class TestAggregate(TestCase): def setUp(self): self.bind = create_datastore('mim:///testdb') self.bind.conn.drop_all() - self.bind.db.coll.insert({'_id':'foo', 'a':2, 'c':[1,2,3], + self.bind.db.coll.insert_one({'_id':'foo', 'a':2, 'c':[1,2,3], 'z': {'egg': 'spam', 'spam': 'egg'}}) for r in range(4): - self.bind.db.rcoll.insert({'_id':'r%s' % r, 'd':r}) + self.bind.db.rcoll.insert_one({'_id':'r%s' % r, 'd':r}) def 
test_aggregate_match(self): res = self.bind.db.rcoll.aggregate([{'$match': {'d': {'$lt': 2}}}]) diff --git a/ming/tests/test_schema.py b/ming/tests/test_schema.py index 51b9f04..e362764 100644 --- a/ming/tests/test_schema.py +++ b/ming/tests/test_schema.py @@ -15,7 +15,7 @@ class TestQuerySafety(TestCase): def setUp(self): self.bind = ming.create_datastore('mim:///datastore') self.bind.conn.drop_all() - self.bind.db.coll.insert({'_id':'foo', 'a':2, 'b':3}) + self.bind.db.coll.insert_one({'_id':'foo', 'a':2, 'b':3}) self.session = ming.Session(self.bind) class Doc(Document): class __mongometa__: diff --git a/ming/tests/test_session.py b/ming/tests/test_session.py index fd64cb0..766df84 100644 --- a/ming/tests/test_session.py +++ b/ming/tests/test_session.py @@ -57,54 +57,60 @@ def test_base_session(self): dict(a=None, b=dict(a=None), _id=None, cc=dict(dd=None, ee=None))) sess.find(TestDoc, dict(a=5)) + impl.find_one.assert_called_with(dict(a=5)) sess.remove(TestDoc, dict(a=5)) - sess.group(TestDoc, 'a') + impl.delete_many.assert_called_with(dict(a=5)) sess.update_partial(TestDoc, dict(a=5), dict(b=6), False) + impl.update_one.assert_called_with(dict(a=5), dict(b=6), False) - impl.find_one.assert_called_with(dict(a=5)) impl.find.assert_called_with(dict(a=5)) - impl.remove.assert_called_with(dict(a=5)) - impl.group.assert_called_with('a') - impl.update.assert_called_with(dict(a=5), dict(b=6), False) doc = TestDoc({}) sess.save(doc) + impl.replace_one.assert_called_with(dict(_id=doc._id), doc, upsert=True) self.assertEqual(doc.a, None) self.assertEqual(doc.b, dict(a=None)) del doc._id sess.insert(doc) - sess.delete(doc) - impl.save.assert_called_with(doc) impl.insert_one.assert_called_with(doc) - impl.remove.assert_called_with(dict(_id=None)) + sess.delete(doc) + impl.delete_one.assert_called_with(dict(_id=None)) + impl.reset_mock() doc = self.TestDocNoSchema({'_id':5, 'a':5}) sess.save(doc) - impl.save.assert_called_with(dict(_id=5, a=5)) + 
impl.replace_one.assert_called_with(dict(_id=5), dict(_id=5, a=5), upsert=True) doc = self.TestDocNoSchema({'_id':5, 'a':5, 'b': 6}) + + impl.reset_mock() sess.save(doc, 'a') - impl.update.assert_called_with(dict(_id=5), {'$set':dict(a=5)}) + impl.update_one.assert_called_with(dict(_id=5), {'$set':dict(a=5)}) + impl.reset_mock() doc = self.TestDocNoSchema({'_id':5, 'a':5}) impl.insert_one.return_value = bson.ObjectId() + impl.reset_mock() sess.insert(doc) impl.insert_one.assert_called_with(dict(_id=5, a=5)) + impl.reset_mock() doc = self.TestDocNoSchema({'_id':5, 'a':5}) sess.upsert(doc, ['a']) - impl.update.assert_called_with(dict(a=5), dict(_id=5, a=5), upsert=True) + impl.update_one.assert_called_with(dict(a=5), {'$set': dict(_id=5, a=5)}, upsert=True) + impl.reset_mock() sess.upsert(doc, '_id') - impl.update.assert_called_with(dict(_id=5), dict(_id=5, a=5), upsert=True) + impl.update_one.assert_called_with(dict(_id=5), {'$set': dict(_id=5, a=5)}, upsert=True) + impl.reset_mock() sess.find_by(self.TestDoc, a=5) sess.count(self.TestDoc) sess.ensure_index(self.TestDoc, 'a') impl.find.assert_called_with(dict(a=5)) - impl.count.assert_called_with() - impl.ensure_index.assert_called_with([ ('a', pymongo.ASCENDING) ]) - impl.ensure_index.reset_mock() + impl.estimated_document_count.assert_called_with() + impl.create_index.assert_called_with([ ('a', pymongo.ASCENDING) ]) + impl.create_index.reset_mock() sess.ensure_indexes(self.TestDoc) self.assertEqual( - impl.ensure_index.call_args_list, [ + impl.create_index.call_args_list, [ ( ([ ('b', pymongo.ASCENDING), ('c', pymongo.ASCENDING) ],), {'unique':False, 'sparse':False, 'background': True} ), @@ -124,11 +130,11 @@ def test_base_session(self): doc = self.TestDocNoSchema(dict(_id=1, a=5)) sess.increase_field(doc, a=60) - impl.update.assert_called_with(dict(_id=1, a={'$lt': 60}), - {'$set': dict(a=60)}) + impl.update_one.assert_called_with(dict(_id=1, a={'$lt': 60}), + {'$set': dict(a=60)}) sess.increase_field(doc, b=60) 
- impl.update.assert_called_with(dict(_id=1, b={'$lt': 60}), - {'$set': dict(b=60)}) + impl.update_one.assert_called_with(dict(_id=1, b={'$lt': 60}), + {'$set': dict(b=60)}) self.assertRaises(ValueError, sess.increase_field, doc, b=None) sess.index_information(self.TestDoc) @@ -147,11 +153,8 @@ def test_find_kwargs(self): def test_aggregations(self): # just check that they exist & run, no input/output checks self.TestDoc.m.aggregate() - self.TestDoc.m.map_reduce() - self.TestDoc.m.inline_map_reduce() self.TestDoc.m.distinct() self.TestDoc.m.find({'a': 'b'}).distinct() - self.TestDoc.m.group() diff --git a/setup.py b/setup.py index 6781ce9..448b30b 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ include_package_data=True, zip_safe=True, install_requires=[ - "pymongo>=3.0,<4", + "pymongo", "pytz", ], tests_require=[ diff --git a/tox.ini b/tox.ini index e97b217..c1a8ea7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,9 +1,10 @@ [tox] skip_missing_interpreters = True envlist = - py{38,39,310,311,312},pypy3 + py{38,39,310,311,312}-pymongo{3,4},pypy3-pymongo{3,4} [testenv] +install_command = pip install {opts} {packages} deps = setuptools pytz @@ -11,6 +12,8 @@ deps = webtest formencode coverage + pymongo3: pymongo<4 + pymongo4: pymongo>=4 commands = coverage run --source ming -m unittest discover -v - coverage report