diff --git a/.gitignore b/.gitignore index dc8ca9d..88b1570 100755 --- a/.gitignore +++ b/.gitignore @@ -25,3 +25,5 @@ couchdbkit.egg-info dist/ doc/_build/ distribute-* +data +jsonobject_couchdbkit.egg-info diff --git a/.hgignore b/.hgignore deleted file mode 100755 index 85a1737..0000000 --- a/.hgignore +++ /dev/null @@ -1,20 +0,0 @@ -syntax: glob -*.orig -*.rej -*~ -*.o -*.pyc -*.pyo -tests/*.err -*.swp -store/* -*.DS_Store -*.beam -.coverage -couchdbkit.egg-info -dist -examples/djangoapp/test.db - -syntax: regexp -.*\#.*\#$ - diff --git a/.hgtags b/.hgtags deleted file mode 100644 index 1e6a042..0000000 --- a/.hgtags +++ /dev/null @@ -1,38 +0,0 @@ -042d6587fe290b158f6779145a96651a894fc5f7 0.1.1 -99a25564cc09004aca0c921c0a94e92c3972c033 0.1.2 -69bc244110249a0b310dfde31b719dc7472bb3d7 0.1.3 -eb07f2cd97b65a5a3ff9ddaac0435effee589fe8 0.1.4 -9d8a3b20aa4e41565513ce1593701e1658c44efc 0.1.6 -7e5da2149fa5864a0390094d9c5dee88589233eb 0.1.6.1 -d9831b7c3d6d21a9827c171e92e84eb60889c481 0.1.7 -d8acae3e42268fd765e5b7c348435ba707d10636 0.1.7.1 -aedb19fb01cb8f4afc79027b20cdd6ebc275825f 0.1.7.2 -9e169d80ffb1e1fdf9131c2824c1ebc5037c5d95 0.1.8 -f9fa696b672130cf15a2d16d97c0cc862507d1fc 0.1.9 -c28f80517104b7a4dc402974959449ccfa60ab08 0.1.9.1 -67dddbdb8ae5d668662f2dcbcc7bf484adf50533 0.2 -ffa06885b498666efe9c069a53f640751a38d4e5 0.2.1 -e9316c36f8e73b3a016debcfd1d130f4c6df3a0d 0.2.2 -e9316c36f8e73b3a016debcfd1d130f4c6df3a0d 0.2.2 -d7e31f3829a8021d50b5f19f33f4a4ec5253a728 0.2.2 -7214410b4514880b8f6b09e6d7cbb5f5e746efa2 0.2.3 -7214410b4514880b8f6b09e6d7cbb5f5e746efa2 0.2.3 -be307befe36be7ff5b102f2fff36105db9a9f871 0.2.3 -903cd60cec4b11d1171e88a045a9a16c2fb03024 0.2.4 -903cd60cec4b11d1171e88a045a9a16c2fb03024 0.2.4 -3dc59a843ce0bad21a16bca4b4a9ec3d8397fd75 0.2.4 -d5c211047a6d2d98daa403c7ba7ceaa24b5a0585 0.3 -d5c211047a6d2d98daa403c7ba7ceaa24b5a0585 0.3 -65bb003bde529a1fc6669ccfbdef3abe5835e283 0.3 -57879431a4e200318378327af6dfe04884822e3e 0.3.1 -062d4b680c600c9cfbf2d96b90af80a291add1e3 
0.4 -062d4b680c600c9cfbf2d96b90af80a291add1e3 0.4 -f8ddd60879ef6ed24a69b76ac1927c3fa1bdbb54 0.4 -f8ddd60879ef6ed24a69b76ac1927c3fa1bdbb54 0.4 -4eb125217ca6800e25deb185a6ced3f813f5727e 0.4 -4eb125217ca6800e25deb185a6ced3f813f5727e 0.4 -a1907763e888440b17353208de0fb78740587735 0.4 -b157f94ff805b10f6ec4118b78d25b9695d10fbf 0.4.1 -b157f94ff805b10f6ec4118b78d25b9695d10fbf 0.4.1 -c43c2a19151952af90d3b26a6ba4f4f726123e27 0.4.1 -631c681601e1be76a94dda6ffc7faa90e55ce9f1 0.4.2 diff --git a/.travis.yml b/.travis.yml index c85e3f1..954575d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,28 @@ language: python services: - - couchdb + - docker -python: - - 2.6 - - 2.7 +python: 2.7 + +install: + - pip install -r requirements_dev.txt + - python setup.py install + +before_script: + - "docker run -d --name couchdb-cluster \ + -p 5984:5984 \ + -v $(pwd)/data:/usr/src/couchdb/dev/lib/ \ + klaemo/couchdb:2.0-dev \ + --with-admin-party-please \ + --with-haproxy + -n 1" + - | + while : + do + curl http://localhost:5984/${db_name} -sv 2>&1 | grep '^< HTTP/.* 200 OK' && break || continue + sleep 1 + done -install: - - pip install -r requirements_dev.txt --use-mirrors - - python setup.py install script: python setup.py test diff --git a/README.rst b/README.rst index 25d9a0d..667595c 100644 --- a/README.rst +++ b/README.rst @@ -1,3 +1,10 @@ +About the jsonobject fork of couchdbkit +--------------------------------------- +`jsonobject-couchdbkit`_ is a fork of couchdbkit that replaces couchdbkit.schema +with a thin wrapper around jsonobject +(which was, incidentally, written as a **way** faster replacement +for couchdbkit.schema.) See `jsonobject`_. + About ----- @@ -155,6 +162,8 @@ greets:: greets = Greeting.view('greeting/all') +.. _jsonobject-couchdbkit: https://github.com/dimagi/couchdbkit/tree/jsonobject +.. _jsonobject: http://github.com/dimagi/jsonobject .. _Couchdbkit: http://couchdbkit.org .. _API: http://couchdbkit.org/doc/api/ .. 
_couchapp: http://github.com/couchapp/couchapp/tree/ diff --git a/couchdbkit/__init__.py b/couchdbkit/__init__.py index 600ebff..1d7a35a 100644 --- a/couchdbkit/__init__.py +++ b/couchdbkit/__init__.py @@ -22,37 +22,11 @@ Property, IntegerProperty, DecimalProperty, BooleanProperty, FloatProperty, StringProperty, DateTimeProperty, DateProperty, TimeProperty, dict_to_json, dict_to_json, dict_to_json, - value_to_python, dict_to_python, + dict_to_python, DocumentSchema, DocumentBase, Document, StaticDocument, contain, QueryMixin, AttachmentMixin, SchemaProperty, SchemaListProperty, SchemaDictProperty, ListProperty, DictProperty, StringDictProperty, StringListProperty, SetProperty ) -import logging - -LOG_LEVELS = { - "critical": logging.CRITICAL, - "error": logging.ERROR, - "warning": logging.WARNING, - "info": logging.INFO, - "debug": logging.DEBUG -} - -def set_logging(level, handler=None): - """ - Set level of logging, and choose where to display/save logs - (file or standard output). - """ - if not handler: - handler = logging.StreamHandler() - - loglevel = LOG_LEVELS.get(level, logging.INFO) - logger = logging.getLogger('couchdbkit') - logger.setLevel(loglevel) - format = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s" - datefmt = r"%Y-%m-%d %H:%M:%S" - - handler.setFormatter(logging.Formatter(format, datefmt)) - logger.addHandler(handler) - +from .logging import (LOG_LEVELS, set_logging, logger) diff --git a/couchdbkit/client.py b/couchdbkit/client.py index 2b70ed1..a9b0231 100644 --- a/couchdbkit/client.py +++ b/couchdbkit/client.py @@ -26,19 +26,31 @@ >>> del server['simplecouchdb_test'] """ +from __future__ import absolute_import -UNKOWN_INFO = {} - - +import base64 from collections import deque +from copy import deepcopy from itertools import groupby +import json from mimetypes import guess_type import time +import cloudant +from cloudant.client import CouchDB +from cloudant.database import CouchDatabase +from cloudant.document import Document +from 
cloudant.error import CloudantClientException +from cloudant.security_document import SecurityDocument +from requests.exceptions import HTTPError from restkit.util import url_quote +import six +from six.moves import filter +from six.moves.urllib.parse import urljoin, unquote +from couchdbkit.logging import error_logger from .exceptions import InvalidAttachment, NoResultFound, \ -ResourceNotFound, ResourceConflict, BulkSaveError, MultipleResultsFound + ResourceNotFound, ResourceConflict, BulkSaveError, MultipleResultsFound from . import resource from .utils import validate_dbname @@ -46,6 +58,8 @@ DEFAULT_UUID_BATCH_COUNT = 1000 +UNKOWN_INFO = {} + def _maybe_serialize(doc): if hasattr(doc, "to_json"): @@ -61,6 +75,7 @@ def _maybe_serialize(doc): return doc, False + class Server(object): """ Server object that allows you to access and manage a couchdb node. A Server object can be used like any `dict` object. @@ -103,6 +118,12 @@ def __init__(self, uri='http://127.0.0.1:5984', else: self.res = self.resource_class(uri, **client_opts) self._uuids = deque() + # admin_party is true, because the username/pass is passed in uri for now + self.cloudant_client = CouchDB('', '', url=uri, admin_party=True, connect=True) + + @property + def _request_session(self): + return self.cloudant_client.r_session def info(self): """ info of server @@ -111,17 +132,18 @@ def info(self): """ try: - resp = self.res.get() + resp = self._request_session.get(self.uri) + resp.raise_for_status() except Exception: return UNKOWN_INFO - return resp.json_body + return resp.json() def all_dbs(self): """ get list of databases in CouchDb host """ - return self.res.get('/_all_dbs').json_body + return self.cloudant_client.all_dbs() def get_db(self, dbname, **params): """ @@ -156,7 +178,10 @@ def delete_db(self, dbname): """ Delete database """ - del self[dbname] + try: + del self[dbname] + except CloudantClientException as e: + raise ResourceNotFound(six.text_type(e)) #TODO: maintain list of replications 
def replicate(self, source, target, **params): @@ -171,21 +196,21 @@ def replicate(self, source, target, **params): http://wiki.apache.org/couchdb/Replication """ - payload = { - "source": source, - "target": target, - } - payload.update(params) - resp = self.res.post('/_replicate', payload=payload) - return resp.json_body + replicator = cloudant.replicator.Replication(self.cloudant_client) + source_db = Database(self.cloudant_client, source) + target_db = Database(self.cloudant_client, target) + return replicator.create_replication(source_db, target_db, **params) def active_tasks(self): """ return active tasks """ - resp = self.res.get('/_active_tasks') - return resp.json_body + resp = self._request_session.get(urljoin(self.uri, '/_active_tasks')) + resp.raise_for_status() + return resp.json() def uuids(self, count=1): - return self.res.get('/_uuids', count=count).json_body + resp = self._request_session.get(urljoin(self.uri, '/_uuids'), params={'count': count}) + resp.raise_for_status() + return resp.json() def next_uuid(self, count=None): """ @@ -206,14 +231,12 @@ def __getitem__(self, dbname): return Database(self._db_uri(dbname), server=self) def __delitem__(self, dbname): - ret = self.res.delete('/%s/' % url_quote(dbname, - safe=":")).json_body - return ret + self.cloudant_client.delete_database(dbname) def __contains__(self, dbname): try: - self.res.head('/%s/' % url_quote(dbname, safe=":")) - except: + self.cloudant_client[dbname] + except KeyError: return False return True @@ -234,6 +257,7 @@ def _db_uri(self, dbname): dbname = url_quote(dbname, safe=":") return "/".join([self.uri, dbname]) + class Database(object): """ Object that abstract access to a CouchDB database A Database object can act as a Dict object. 
@@ -250,6 +274,7 @@ def __init__(self, uri, create=False, server=None, **params): """ self.uri = uri.rstrip('/') self.server_uri, self.dbname = self.uri.rsplit("/", 1) + self.cloudant_dbname = unquote(self.dbname) if server is not None: if not hasattr(server, 'next_uuid'): @@ -259,33 +284,44 @@ def __init__(self, uri, create=False, server=None, **params): else: self.server = server = Server(self.server_uri, **params) + self.cloudant_client = self.server.cloudant_client + validate_dbname(self.dbname) + self.cloudant_database = CouchDatabase(self.cloudant_client, self.cloudant_dbname) if create: - try: - self.server.res.head('/%s/' % self.dbname) - except ResourceNotFound: - self.server.res.put('/%s/' % self.dbname, **params).json_body + self.cloudant_database.create() self.res = server.res(self.dbname) + self._request_session = self.server._request_session + self.database_url = self.cloudant_database.database_url def __repr__(self): return "<%s %s>" % (self.__class__.__name__, self.dbname) + def _database_path(self, path): + return '/'.join([self.database_url, path]) + def info(self): """ Get database information @return: dict """ - return self.res.get().json_body + return self.cloudant_database.metadata() def set_security(self, secobj): """ set database securrity object """ - return self.res.put("/_security", payload=secobj).json_body + with SecurityDocument(self.cloudant_database) as sec_doc: + # context manager saves + for key in sec_doc: + del sec_doc[key] + for k, v in secobj.items(): + sec_doc[k] = v + return self.get_security() def get_security(self): """ get database secuirity object """ - return self.res.get("/_security").json_body + return self.cloudant_database.get_security_document() def compact(self, dname=None): """ compact database @@ -295,23 +331,20 @@ def compact(self, dname=None): path = "/_compact" if dname is not None: path = "%s/%s" % (path, resource.escape_docid(dname)) - res = self.res.post(path, headers={"Content-Type": - "application/json"}) 
- return res.json_body + path = self._database_path(path) + res = self._request_session.post(path, headers={"Content-Type": "application/json"}) + res.raise_for_status() + return res.json() def view_cleanup(self): - res = self.res.post('/_view_cleanup', headers={"Content-Type": - "application/json"}) - return res.json_body + return self.cloudant_database.view_cleanup() def flush(self): """ Remove all docs from a database except design docs.""" # save ddocs - all_ddocs = self.all_docs(startkey="_design", - endkey="_design/"+u"\u9999", - include_docs=True) + all_ddocs = self.all_docs(startkey="_design", endkey="_design/"+u"\u9999", include_docs=True) ddocs = [] for ddoc in all_ddocs: doc = ddoc['doc'] @@ -335,9 +368,7 @@ def flush(self): # we let a chance to the system to sync times = 0 while times < 10: - try: - self.server.res.head('/%s/' % self.dbname) - except ResourceNotFound: + if self.dbname in self.server: break time.sleep(0.2) times += 1 @@ -352,12 +383,8 @@ def doc_exist(self, docid): @param docid: str, document id @return: boolean, True if document exist """ - - try: - self.res.head(resource.escape_docid(docid)) - except ResourceNotFound: - return False - return True + doc = Document(self.cloudant_database, docid) + return doc.exists() def open_doc(self, docid, **params): """Get document from database @@ -380,16 +407,33 @@ def open_doc(self, docid, **params): if not hasattr(schema, "wrap"): raise TypeError("invalid schema") wrapper = schema.wrap + attachments = params.get('attachments', False) + + if isinstance(docid, six.text_type): + docid = docid.encode('utf-8') + doc = Document(self.cloudant_database, docid) + try: + doc.fetch() + except HTTPError as e: + if e.response.status_code == 404: + raise ResourceNotFound(json.loads(e.response.content)['reason']) + raise + doc_dict = dict(doc) + + if attachments and '_attachments' in doc_dict: + for attachment_name in doc_dict['_attachments']: + attachment_data = doc.get_attachment(attachment_name, 
attachment_type='binary') + doc_dict['_attachments'][attachment_name]['data'] = base64.b64encode(attachment_data) + del doc_dict['_attachments'][attachment_name]['stub'] + del doc_dict['_attachments'][attachment_name]['length'] - docid = resource.escape_docid(docid) - doc = self.res.get(docid, **params).json_body if wrapper is not None: if not callable(wrapper): raise TypeError("wrapper isn't a callable") - return wrapper(doc) + return wrapper(doc_dict) - return doc + return doc_dict get = open_doc def list(self, list_name, view_name, **params): @@ -479,8 +523,15 @@ def get_rev(self, docid): @return rev: str, the last revision of document. """ - response = self.res.head(resource.escape_docid(docid)) - return response['etag'].strip('"') + response = self._request_session.head(self._database_path(docid)) + try: + response.raise_for_status() + except HTTPError as e: + if e.response.status_code == 404: + raise ResourceNotFound + raise + # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag + return response.headers['ETag'].strip('"').lstrip('W/"') def save_doc(self, doc, encode_attachments=True, force_update=False, **params): @@ -507,48 +558,56 @@ def save_doc(self, doc, encode_attachments=True, force_update=False, if '_attachments' in doc1 and encode_attachments: doc1['_attachments'] = resource.encode_attachments(doc['_attachments']) - if '_id' in doc: - docid = doc1['_id'] - docid1 = resource.escape_docid(doc1['_id']) + if '_id' in doc1: + docid = doc1['_id'].encode('utf-8') + couch_doc = Document(self.cloudant_database, docid) + couch_doc.update(doc1) try: - res = self.res.put(docid1, payload=doc1, - **params).json_body - except ResourceConflict: + # Copied from Document.save to ensure that a deleted doc cannot be saved. 
+ headers = {} + headers.setdefault('Content-Type', 'application/json') + put_resp = couch_doc.r_session.put( + couch_doc.document_url, + data=couch_doc.json(), + headers=headers + ) + put_resp.raise_for_status() + data = put_resp.json() + super(Document, couch_doc).__setitem__('_rev', data['rev']) + except HTTPError as e: + if e.response.status_code != 409: + raise + if force_update: - doc1['_rev'] = self.get_rev(docid) - res =self.res.put(docid1, payload=doc1, - **params).json_body + couch_doc['_rev'] = self.get_rev(docid) + couch_doc.save() else: - raise + raise ResourceConflict + res = couch_doc else: - try: - doc['_id'] = self.server.next_uuid() - res = self.res.put(doc['_id'], payload=doc1, - **params).json_body - except: - res = self.res.post(payload=doc1, **params).json_body + res = self.cloudant_database.create_document(doc1) - if 'batch' in params and 'id' in res: - doc1.update({ '_id': res['id']}) + if 'batch' in params and ('id' in res or '_id' in res): + doc1.update({ '_id': res.get('_id')}) else: - doc1.update({'_id': res['id'], '_rev': res['rev']}) - + doc1.update({'_id': res.get('_id'), '_rev': res.get('_rev')}) if schema: - doc._doc = doc1 + for key, value in six.iteritems(doc.__class__.wrap(doc1)): + doc[key] = value else: doc.update(doc1) - return res + return { + 'id': res['_id'], + 'rev': res['_rev'], + 'ok': True, + } - def save_docs(self, docs, use_uuids=True, all_or_nothing=False, new_edits=None, - **params): + def save_docs(self, docs, use_uuids=True, new_edits=None, **params): """ bulk save. Modify Multiple Documents With a Single Request @param docs: list of docs @param use_uuids: add _id in doc who don't have it already set. - @param all_or_nothing: In the case of a power failure, when the database - restarts either all the changes will have been saved or none of them. - However, it does not do conflict checking, so the documents will @param new_edits: When False, this saves existing revisions instead of creating new ones. 
Used in the replication Algorithm. Each document should have a _revisions property that lists its revision history. @@ -557,6 +616,8 @@ def save_docs(self, docs, use_uuids=True, all_or_nothing=False, new_edits=None, """ + if not isinstance(docs, (list, tuple)): + docs = tuple(docs) docs1 = [] docs_schema = [] for doc in docs: @@ -579,20 +640,27 @@ def is_id(doc): if nextid: doc['_id'] = nextid - payload = { "docs": docs1 } - if all_or_nothing: - payload["all_or_nothing"] = True + payload = {"docs": docs1} if new_edits is not None: payload["new_edits"] = new_edits # update docs - results = self.res.post('/_bulk_docs', - payload=payload, **params).json_body + res = self._request_session.post( + self._database_path('_bulk_docs'), data=json.dumps(payload), + headers={"Content-Type": "application/json"}, **params) + res.raise_for_status() + results = res.json() errors = [] for i, res in enumerate(results): if 'error' in res: errors.append(res) + logging_context = dict( + method='save_docs', + params=params, + error=res['error'], + ) + error_logger.error("save_docs error", extra=logging_context) else: if docs_schema[i]: docs[i]._doc.update({ @@ -609,17 +677,13 @@ def is_id(doc): return results bulk_save = save_docs - def delete_docs(self, docs, all_or_nothing=False, - empty_on_delete=False, **params): + def delete_docs(self, docs, empty_on_delete=False, **params): """ bulk delete. It adds '_deleted' member to doc then uses bulk_save to save them. @param empty_on_delete: default is False if you want to make sure the doc is emptied and will not be stored as is in Apache CouchDB. - @param all_or_nothing: In the case of a power failure, when the database - restarts either all the changes will have been saved or none of them. - However, it does not do conflict checking, so the documents will .. 
seealso:: `HTTP Bulk Document API ` @@ -637,8 +701,7 @@ def delete_docs(self, docs, all_or_nothing=False, for doc in docs: doc['_deleted'] = True - return self.bulk_save(docs, use_uuids=False, - all_or_nothing=all_or_nothing, **params) + return self.bulk_save(docs, use_uuids=False, **params) bulk_delete = delete_docs @@ -654,16 +717,24 @@ def delete_doc(self, doc, **params): result = { 'ok': False } doc1, schema = _maybe_serialize(doc) + if isinstance(doc1, dict): if not '_id' or not '_rev' in doc1: raise KeyError('_id and _rev are required to delete a doc') - docid = resource.escape_docid(doc1['_id']) - result = self.res.delete(docid, rev=doc1['_rev'], **params).json_body - elif isinstance(doc1, basestring): # we get a docid - rev = self.get_rev(doc1) - docid = resource.escape_docid(doc1) - result = self.res.delete(docid, rev=rev, **params).json_body + couch_doc = Document(self.cloudant_database, doc1['_id']) + couch_doc['_rev'] = doc1['_rev'] + elif isinstance(doc1, six.string_types): # we get a docid + couch_doc = Document(self.cloudant_database, doc1) + couch_doc['_rev'] = self.get_rev(doc1) + + # manual request because cloudant library doesn't return result + res = self._request_session.delete( + couch_doc.document_url, + params={"rev": couch_doc["_rev"]}, + ) + res.raise_for_status() + result = res.json() if schema: doc._doc.update({ @@ -687,16 +758,16 @@ def copy_doc(self, doc, dest=None, headers=None): headers = {} doc1, schema = _maybe_serialize(doc) - if isinstance(doc1, basestring): + if isinstance(doc1, six.string_types): docid = doc1 else: - if not '_id' in doc1: + if '_id' not in doc1: raise KeyError('_id is required to copy a doc') docid = doc1['_id'] if dest is None: destination = self.server.next_uuid(count=1) - elif isinstance(dest, basestring): + elif isinstance(dest, six.string_types): if dest in self: dest = self.get(dest) destination = "%s?rev=%s" % (dest['_id'], dest['_rev']) @@ -710,10 +781,11 @@ def copy_doc(self, doc, dest=None, 
headers=None): if destination: headers.update({"Destination": str(destination)}) - result = self.res.copy('/%s' % docid, headers=headers).json_body - return result + resp = self._request_session.request('copy', self._database_path(docid), headers=headers) + resp.raise_for_status() + return resp.json() - return { 'ok': False } + return {'ok': False} def raw_view(self, view_path, params): if 'keys' in params: @@ -722,10 +794,6 @@ def raw_view(self, view_path, params): else: return self.res.get(view_path, **params) - def raw_temp_view(db, design, params): - return db.res.post('_temp_view', payload=design, - headers={"Content-Type": "application/json"}, **params) - def view(self, view_name, schema=None, wrapper=None, **params): """ get view results from database. viewname is generally a string like `designname/viewname". It return an ViewResults @@ -758,10 +826,6 @@ def view(self, view_name, schema=None, wrapper=None, **params): return ViewResults(self.raw_view, view_path, wrapper, schema, params) - def temp_view(self, design, schema=None, wrapper=None, **params): - """ get adhoc view results. Like view it reeturn a ViewResult object.""" - return ViewResults(self.raw_temp_view, design, wrapper, schema, params) - def search( self, view_name, handler='_fti/_design', wrapper=None, schema=None, **params): """ Search. Return results from search. Use couchdb-lucene with its default settings by default.""" @@ -777,8 +841,6 @@ def documents(self, schema=None, wrapper=None, **params): wrapper=wrapper, schema=schema, params=params) iterdocuments = documents - - def put_attachment(self, doc, content, name=None, content_type=None, content_length=None, headers=None): """ Add attachement to a document. All attachments are streamed. 
@@ -818,11 +880,13 @@ def put_attachment(self, doc, content, name=None, content_type=None, if not content: content = "" content_length = 0 + if name is None: if hasattr(content, "name"): name = content.name else: raise InvalidAttachment('You should provide a valid attachment name') + name = url_quote(name, safe="") if content_type is None: content_type = ';'.join(filter(None, guess_type(name))) @@ -831,7 +895,7 @@ def put_attachment(self, doc, content, name=None, content_type=None, headers['Content-Type'] = content_type # add appropriate headers - if content_length and content_length is not None: + if content_length: headers['Content-Length'] = content_length doc1, schema = _maybe_serialize(doc) @@ -865,9 +929,7 @@ def delete_attachment(self, doc, name, headers=None): doc.update(new_doc) return res['ok'] - - def fetch_attachment(self, id_or_doc, name, stream=False, - headers=None): + def fetch_attachment(self, id_or_doc, name, stream=False, headers=None): """ get attachment in a document @param id_or_doc: str or dict, doc id or document dict @@ -876,7 +938,7 @@ def fetch_attachment(self, id_or_doc, name, stream=False, @return: `restkit.httpc.Response` object """ - if isinstance(id_or_doc, basestring): + if isinstance(id_or_doc, six.string_types): docid = id_or_doc else: doc, schema = _maybe_serialize(id_or_doc) @@ -893,9 +955,10 @@ def fetch_attachment(self, id_or_doc, name, stream=False, def ensure_full_commit(self): """ commit all docs in memory """ - return self.res.post('_ensure_full_commit', headers={ - "Content-Type": "application/json" - }).json_body + path = self._database_path('_ensure_full_commit') + res = self._request_session.post(path, headers={"Content-Type": "application/json"}) + res.raise_for_status() + return res.json() def __len__(self): return self.info()['doc_count'] @@ -910,9 +973,8 @@ def __setitem__(self, docid, doc): doc['_id'] = docid self.save_doc(doc) - def __delitem__(self, docid): - self.delete_doc(docid) + self.delete_doc(docid) def 
__iter__(self): return self.documents().iterator() @@ -920,6 +982,7 @@ def __iter__(self): def __nonzero__(self): return (len(self) > 0) + class ViewResults(object): """ Object to retrieve view results. @@ -1096,6 +1159,3 @@ def __len__(self): def __nonzero__(self): return bool(len(self)) - - - diff --git a/couchdbkit/consumer/__init__.py b/couchdbkit/consumer/__init__.py index 32a877b..7b437b4 100644 --- a/couchdbkit/consumer/__init__.py +++ b/couchdbkit/consumer/__init__.py @@ -4,6 +4,7 @@ # See the NOTICE for more information. +from __future__ import absolute_import from .base import ConsumerBase OLD_CONSUMER_URIS = dict( diff --git a/couchdbkit/consumer/ceventlet.py b/couchdbkit/consumer/ceventlet.py index 3459287..94c5483 100644 --- a/couchdbkit/consumer/ceventlet.py +++ b/couchdbkit/consumer/ceventlet.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import import traceback import eventlet diff --git a/couchdbkit/consumer/cgevent.py b/couchdbkit/consumer/cgevent.py index 019d868..1241e39 100644 --- a/couchdbkit/consumer/cgevent.py +++ b/couchdbkit/consumer/cgevent.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import import traceback import gevent diff --git a/couchdbkit/designer/fs.py b/couchdbkit/designer/fs.py index 1d78272..a6a3aab 100644 --- a/couchdbkit/designer/fs.py +++ b/couchdbkit/designer/fs.py @@ -4,6 +4,7 @@ # See the NOTICE for more information. from __future__ import with_statement +from __future__ import absolute_import import base64 import copy from hashlib import md5 @@ -18,6 +19,8 @@ BulkSaveError from .macros import package_shows, package_views from .. 
import utils +import six +from six.moves import filter if os.name == 'nt': def _replace_backslash(name): @@ -86,13 +89,18 @@ def create(self): def push(self, dbs, atomic=True, force=False): """Push a doc to a list of database `dburls`. If noatomic is true each attachments will be sent one by one.""" + pushed = False for db in dbs: + db_push = False if atomic: doc = self.doc(db, force=force) - db.save_doc(doc, force_update=True) + if not self._compare_with_current_version(db, doc): + db_push = True + db.save_doc(doc, force_update=True) else: doc = self.doc(db, with_attachments=False, force=force) db.save_doc(doc, force_update=True) + db_push = True attachments = doc.get('_attachments') or {} @@ -102,9 +110,23 @@ def push(self, dbs, atomic=True, force=False): db.put_attachment(doc, open(filepath, "r"), name=name) - logger.debug("%s/%s had been pushed from %s" % (db.uri, - self.docid, self.docdir)) + if db_push: + logger.debug("%s/%s had been pushed from %s" % (db.uri, + self.docid, self.docdir)) + pushed |= db_push + return pushed + + def _compare_with_current_version(self, db, doc): + """:returns: True if docs match otherwise False""" + try: + olddoc = db.open_doc(self._doc['_id']) + except ResourceNotFound: + return False + + if '_attachments' not in olddoc: + olddoc['_attachments'] = {} + return doc == olddoc def attachment_stub(self, name, filepath): att = {} @@ -211,7 +233,7 @@ def doc(self, db=None, with_attachments=True, force=False): name = name[:-1] dmanifest[name] = i - for vname, value in self._doc['views'].iteritems(): + for vname, value in six.iteritems(self._doc['views']): if value and isinstance(value, dict): views[vname] = value else: @@ -391,10 +413,12 @@ def push(path, dbs, atomic=True, force=False, docid=None): dbs = [dbs] doc = document(path, create=False, docid=docid) - doc.push(dbs, atomic=atomic, force=force) + pushed = doc.push(dbs, atomic=atomic, force=force) docspath = os.path.join(path, '_docs') if os.path.exists(docspath): + pushed = True 
pushdocs(docspath, dbs, atomic=atomic) + return pushed def pushapps(path, dbs, atomic=True, export=False, couchapprc=False): """ push all couchapps in one folder like couchapp pushapps command @@ -425,7 +449,7 @@ def pushapps(path, dbs, atomic=True, export=False, couchapprc=False): docs = [doc.doc(db) for doc in apps] try: db.save_docs(docs) - except BulkSaveError, e: + except BulkSaveError as e: docs1 = [] for doc in e.errors: try: @@ -490,7 +514,7 @@ def pushdocs(path, dbs, atomic=True, export=False): docs1.append(newdoc) try: db.save_docs(docs1) - except BulkSaveError, e: + except BulkSaveError as e: # resolve conflicts docs1 = [] for doc in e.errors: @@ -562,7 +586,7 @@ def clone(db, docid, dest=None, rev=None): break - if isinstance(content, basestring): + if isinstance(content, six.string_types): _ref = md5(utils.to_bytestring(content)).hexdigest() if objects and _ref in objects: content = objects[_ref] @@ -594,7 +618,7 @@ def clone(db, docid, dest=None, rev=None): # second pass for missing key or in case # manifest isn't in app - for key in doc.iterkeys(): + for key in six.iterkeys(doc): if key.startswith('_'): continue elif key in ('couchapp'): @@ -614,11 +638,11 @@ def clone(db, docid, dest=None, rev=None): vs_dir = os.path.join(path, key) if not os.path.isdir(vs_dir): os.makedirs(vs_dir) - for vsname, vs_item in doc[key].iteritems(): + for vsname, vs_item in six.iteritems(doc[key]): vs_item_dir = os.path.join(vs_dir, vsname) if not os.path.isdir(vs_item_dir): os.makedirs(vs_item_dir) - for func_name, func in vs_item.iteritems(): + for func_name, func in six.iteritems(vs_item): filename = os.path.join(vs_item_dir, '%s.js' % func_name) utils.write_content(filename, func) @@ -627,7 +651,7 @@ def clone(db, docid, dest=None, rev=None): showpath = os.path.join(path, key) if not os.path.isdir(showpath): os.makedirs(showpath) - for func_name, func in doc[key].iteritems(): + for func_name, func in six.iteritems(doc[key]): filename = os.path.join(showpath, '%s.js' 
% func_name) utils.write_content(filename, func) @@ -644,9 +668,9 @@ def clone(db, docid, dest=None, rev=None): elif isinstance(doc[key], dict): if not os.path.isdir(filedir): os.makedirs(filedir) - for field, value in doc[key].iteritems(): + for field, value in six.iteritems(doc[key]): fieldpath = os.path.join(filedir, field) - if isinstance(value, basestring): + if isinstance(value, six.string_types): if value.startswith('base64-encoded;'): value = base64.b64decode(content[15:]) utils.write_content(fieldpath, value) @@ -654,7 +678,7 @@ def clone(db, docid, dest=None, rev=None): utils.write_json(fieldpath + '.json', value) else: value = doc[key] - if not isinstance(value, basestring): + if not isinstance(value, six.string_types): value = str(value) utils.write_content(filedir, value) @@ -669,7 +693,7 @@ def clone(db, docid, dest=None, rev=None): if not os.path.isdir(attachdir): os.makedirs(attachdir) - for filename in doc['_attachments'].iterkeys(): + for filename in six.iterkeys(doc['_attachments']): if filename.startswith('vendor'): attach_parts = utils.split_path(filename) vendor_attachdir = os.path.join(path, attach_parts.pop(0), diff --git a/couchdbkit/designer/macros.py b/couchdbkit/designer/macros.py index 10f20d9..d9fad95 100644 --- a/couchdbkit/designer/macros.py +++ b/couchdbkit/designer/macros.py @@ -31,6 +31,7 @@ hash when views are updated. 
""" +from __future__ import absolute_import import glob from hashlib import md5 import logging @@ -39,6 +40,7 @@ from ..exceptions import MacroError from ..utils import read_file, read_json, to_bytestring, json +import six logger = logging.getLogger(__name__) @@ -47,13 +49,13 @@ def package_shows(doc, funcs, app_dir, objs): apply_lib(doc, funcs, app_dir, objs) def package_views(doc, views, app_dir, objs): - for view, funcs in views.iteritems(): + for view, funcs in six.iteritems(views): if hasattr(funcs, "items"): apply_lib(doc, funcs, app_dir, objs) def apply_lib(doc, funcs, app_dir, objs): for k, v in funcs.items(): - if not isinstance(v, basestring): + if not isinstance(v, six.string_types): continue else: logger.debug("process function: %s" % k) @@ -61,7 +63,7 @@ def apply_lib(doc, funcs, app_dir, objs): try: funcs[k] = run_json_macros(doc, run_code_macros(v, app_dir), app_dir) - except ValueError, e: + except ValueError as e: raise MacroError( "Error running !code or !json on function \"%s\": %s" % (k, e)) if old_v != funcs[k]: @@ -80,7 +82,7 @@ def rreq(mo): if cnt.find("!code") >= 0: cnt = run_code_macros(cnt, app_dir) library += cnt - except IOError, e: + except IOError as e: raise MacroError(str(e)) filenum += 1 @@ -109,7 +111,7 @@ def rjson(mo): library = read_json(filename) else: library = read_file(filename) - except IOError, e: + except IOError as e: raise MacroError(str(e)) filenum += 1 current_file = filename.split(app_dir)[1] @@ -154,7 +156,7 @@ def rjson2(mo): if not included: return f_string - for k, v in included.iteritems(): + for k, v in six.iteritems(included): varstrings.append("var %s = %s;" % (k, json.dumps(v).encode('utf-8'))) return re_json.sub(rjson2, f_string) diff --git a/couchdbkit/exceptions.py b/couchdbkit/exceptions.py index a6c3fff..575377b 100644 --- a/couchdbkit/exceptions.py +++ b/couchdbkit/exceptions.py @@ -6,7 +6,9 @@ """ All exceptions used in couchdbkit. 
""" -from restkit.errors import ResourceError, RequestFailed +from __future__ import absolute_import +from restkit.errors import ResourceError +import jsonobject.exceptions class InvalidAttachment(Exception): """ raised when an attachment is invalid """ @@ -15,9 +17,7 @@ class DuplicatePropertyError(Exception): """ exception raised when there is a duplicate property in a model """ -class BadValueError(Exception): - """ exception raised when a value can't be validated - or is required """ +BadValueError = jsonobject.exceptions.BadValueError class MultipleResultsFound(Exception): """ exception raised when more than one object is diff --git a/couchdbkit/ext/django/__init__.py b/couchdbkit/ext/django/__init__.py index 0897262..b5bc1e2 100644 --- a/couchdbkit/ext/django/__init__.py +++ b/couchdbkit/ext/django/__init__.py @@ -88,12 +88,13 @@ def home(request): To create databases and sync views, just run the usual `syncdb` command. It won't destroy your datas, just synchronize views. """ - +from __future__ import absolute_import from django.db.models import signals -def syncdb(app, created_models, verbosity=2, **kwargs): + +def syncdb(app_config, verbosity=2, **kwargs): """ function used by syncdb signal """ from couchdbkit.ext.django.loading import couchdbkit_handler - couchdbkit_handler.sync(app, verbosity=verbosity) + couchdbkit_handler.sync(app_config, verbosity=verbosity) -signals.post_syncdb.connect(syncdb) +signals.post_migrate.connect(syncdb) diff --git a/couchdbkit/ext/django/forms.py b/couchdbkit/ext/django/forms.py index 8b5e4ab..8c9ef1d 100644 --- a/couchdbkit/ext/django/forms.py +++ b/couchdbkit/ext/django/forms.py @@ -78,12 +78,14 @@ """ +from __future__ import absolute_import +from collections import OrderedDict from django.utils.text import capfirst -from django.utils.datastructures import SortedDict from django.forms.util import ErrorList from django.forms.forms import BaseForm, get_declared_fields from django.forms import fields as f from 
django.forms.widgets import media_property +import six FIELDS_PROPERTES_MAPPING = { "StringProperty": f.CharField, @@ -120,7 +122,7 @@ def document_to_dict(instance, properties=None, exclude=None): def fields_for_document(document, properties=None, exclude=None): """ - Returns a ``SortedDict`` containing form fields for the given document. + Returns a ``OrderedDict`` containing form fields for the given document. ``properties`` is an optional list of properties names. If provided, only the named properties will be included in the returned properties. @@ -136,7 +138,7 @@ def fields_for_document(document, properties=None, exclude=None): values = [document._properties[prop] for prop in properties if \ prop in document._properties] else: - values = document._properties.values() + values = list(document._properties.values()) values.sort(lambda a, b: cmp(a.creation_counter, b.creation_counter)) for prop in values: @@ -162,7 +164,7 @@ def fields_for_document(document, properties=None, exclude=None): field_list.append((prop.name, FIELDS_PROPERTES_MAPPING[property_class_name](**defaults))) - return SortedDict(field_list) + return OrderedDict(field_list) class DocumentFormOptions(object): def __init__(self, options=None): @@ -263,6 +265,5 @@ def save(self, commit=True, dynamic=True): return self.instance -class DocumentForm(BaseDocumentForm): +class DocumentForm(six.with_metaclass(DocumentFormMetaClass, BaseDocumentForm)): """ The document form object """ - __metaclass__ = DocumentFormMetaClass diff --git a/couchdbkit/ext/django/loading.py b/couchdbkit/ext/django/loading.py index e89bcd4..d867e45 100644 --- a/couchdbkit/ext/django/loading.py +++ b/couchdbkit/ext/django/loading.py @@ -19,8 +19,11 @@ and manage db sessions """ +from __future__ import absolute_import +from __future__ import print_function import sys import os +from collections import OrderedDict from restkit import BasicAuth from couchdbkit import Server @@ -28,7 +31,7 @@ from couchdbkit.resource import 
CouchdbResource from couchdbkit.exceptions import ResourceNotFound from django.conf import settings -from django.utils.datastructures import SortedDict +import six COUCHDB_DATABASES = getattr(settings, "COUCHDB_DATABASES", []) COUCHDB_TIMEOUT = getattr(settings, "COUCHDB_TIMEOUT", 300) @@ -39,7 +42,7 @@ class CouchdbkitHandler(object): # share state between instances __shared_state__ = dict( _databases = {}, - app_schema = SortedDict() + app_schema = OrderedDict() ) def __init__(self, databases): @@ -55,7 +58,7 @@ def __init__(self, databases): ) # create databases sessions - for app_name, app_setting in databases.iteritems(): + for app_name, app_setting in six.iteritems(databases): uri = app_setting['URL'] # Do not send credentials when they are both None as admin party will give a 401 @@ -88,9 +91,9 @@ def sync(self, app, verbosity=2, temp=None): When temp is specified, it is appended to the app's name on the docid. It can then be updated in the background and copied over the existing design docs to reduce blocking time of view updates """ - app_name = app.__name__.rsplit('.', 1)[0] + app_name = app.name.rsplit('.', 1)[0] app_labels = set() - schema_list = self.app_schema.values() + schema_list = list(self.app_schema.values()) for schema_dict in schema_list: for schema in schema_dict.values(): app_module = schema.__module__.rsplit(".", 1)[0] @@ -100,14 +103,14 @@ def sync(self, app, verbosity=2, temp=None): if not app_label in self._databases: continue if verbosity >=1: - print "sync `%s` in CouchDB" % app_name + print("sync `%s` in CouchDB" % app_name) db = self.get_db(app_label) - app_path = os.path.abspath(os.path.join(sys.modules[app.__name__].__file__, "..")) + app_path = app.path design_path = "%s/%s" % (app_path, "_design") if not os.path.isdir(design_path): if settings.DEBUG: - print >>sys.stderr, "%s don't exists, no ddoc synchronized" % design_path + print("%s don't exists, no ddoc synchronized" % design_path, file=sys.stderr) return if temp: @@ -122,10 
+125,10 @@ def sync(self, app, verbosity=2, temp=None): if temp: ddoc = db[docid] - view_names = ddoc.get('views', {}).keys() + view_names = list(ddoc.get('views', {}).keys()) if len(view_names) > 0: if verbosity >= 1: - print 'Triggering view rebuild' + print('Triggering view rebuild') view = '%s/%s' % (design_name, view_names[0]) list(db.view(view, limit=0)) @@ -136,9 +139,9 @@ def copy_designs(self, app, temp, verbosity=2, delete=True): This is used to reduce the waiting time for blocking view updates """ - app_name = app.__name__.rsplit('.', 1)[0] + app_name = app.name.rsplit('.', 1)[0] app_labels = set() - schema_list = self.app_schema.values() + schema_list = list(self.app_schema.values()) for schema_dict in schema_list: for schema in schema_dict.values(): app_module = schema.__module__.rsplit(".", 1)[0] @@ -148,7 +151,7 @@ def copy_designs(self, app, temp, verbosity=2, delete=True): if not app_label in self._databases: continue if verbosity >=1: - print "Copy prepared design docs for `%s`" % app_name + print("Copy prepared design docs for `%s`" % app_name) db = self.get_db(app_label) tmp_name = '%s-%s' % (app_label, temp) @@ -163,7 +166,7 @@ def copy_designs(self, app, temp, verbosity=2, delete=True): del db[from_id] except ResourceNotFound: - print '%s not found.' % (from_id, ) + print('%s not found.' 
% (from_id, )) return @@ -183,7 +186,7 @@ def register_schema(self, app_label, *schema): """ register a Document object""" for s in schema: schema_name = schema[0].__name__.lower() - schema_dict = self.app_schema.setdefault(app_label, SortedDict()) + schema_dict = self.app_schema.setdefault(app_label, OrderedDict()) if schema_name in schema_dict: fname1 = os.path.abspath(sys.modules[s.__module__].__file__) fname2 = os.path.abspath(sys.modules[schema_dict[schema_name].__module__].__file__) @@ -193,7 +196,7 @@ def register_schema(self, app_label, *schema): def get_schema(self, app_label, schema_name): """ retriev Document object from its name and app name """ - return self.app_schema.get(app_label, SortedDict()).get(schema_name.lower()) + return self.app_schema.get(app_label, OrderedDict()).get(schema_name.lower()) couchdbkit_handler = CouchdbkitHandler(COUCHDB_DATABASES) register_schema = couchdbkit_handler.register_schema diff --git a/couchdbkit/ext/django/management/commands/sync_couchdb.py b/couchdbkit/ext/django/management/commands/sync_couchdb.py index faa9e6c..cfdfd64 100644 --- a/couchdbkit/ext/django/management/commands/sync_couchdb.py +++ b/couchdbkit/ext/django/management/commands/sync_couchdb.py @@ -1,4 +1,5 @@ -from django.db.models import get_apps +from __future__ import absolute_import +from django.apps import apps from django.core.management.base import BaseCommand from couchdbkit.ext.django.loading import couchdbkit_handler @@ -6,5 +7,5 @@ class Command(BaseCommand): help = 'Sync couchdb views.' 
def handle(self, *args, **options): - for app in get_apps(): + for app in apps.get_apps(): couchdbkit_handler.sync(app, verbosity=2) diff --git a/couchdbkit/ext/django/management/commands/sync_finish_couchdb.py b/couchdbkit/ext/django/management/commands/sync_finish_couchdb.py index e6bbd05..4a45008 100644 --- a/couchdbkit/ext/django/management/commands/sync_finish_couchdb.py +++ b/couchdbkit/ext/django/management/commands/sync_finish_couchdb.py @@ -1,4 +1,5 @@ -from django.db.models import get_apps +from __future__ import absolute_import +from django.apps import apps from django.core.management.base import BaseCommand from couchdbkit.ext.django.loading import couchdbkit_handler @@ -6,5 +7,5 @@ class Command(BaseCommand): help = 'Copy temporary design docs over existing ones' def handle(self, *args, **options): - for app in get_apps(): + for app in apps.get_apps(): couchdbkit_handler.copy_designs(app, temp='tmp', verbosity=2) diff --git a/couchdbkit/ext/django/management/commands/sync_prepare_couchdb.py b/couchdbkit/ext/django/management/commands/sync_prepare_couchdb.py index c86e18a..487756a 100644 --- a/couchdbkit/ext/django/management/commands/sync_prepare_couchdb.py +++ b/couchdbkit/ext/django/management/commands/sync_prepare_couchdb.py @@ -1,4 +1,5 @@ -from django.db.models import get_apps +from __future__ import absolute_import +from django.apps import apps from django.core.management.base import BaseCommand from couchdbkit.ext.django.loading import couchdbkit_handler @@ -6,5 +7,5 @@ class Command(BaseCommand): help = 'Sync design docs to temporary ids' def handle(self, *args, **options): - for app in get_apps(): + for app in apps.get_apps(): couchdbkit_handler.sync(app, verbosity=2, temp='tmp') diff --git a/couchdbkit/ext/django/schema.py b/couchdbkit/ext/django/schema.py index e5cc0c2..16d5d15 100644 --- a/couchdbkit/ext/django/schema.py +++ b/couchdbkit/ext/django/schema.py @@ -17,8 +17,10 @@ """ Wrapper of couchdbkit Document and Properties for django. 
It also add possibility to a document to register itself in CouchdbkitHandler """ +from __future__ import absolute_import import re import sys +import six try: from django.db.models.options import get_verbose_name @@ -38,7 +40,7 @@ 'DecimalProperty', 'BooleanProperty', 'FloatProperty', 'DateTimeProperty', 'DateProperty', 'TimeProperty', 'dict_to_json', 'list_to_json', 'value_to_json', - 'value_to_python', 'dict_to_python', 'list_to_python', + 'dict_to_python', 'list_to_python', 'convert_property', 'DocumentSchema', 'Document', 'SchemaProperty', 'SchemaListProperty', 'ListProperty', 'DictProperty', 'StringDictProperty', 'StringListProperty', @@ -46,7 +48,9 @@ DEFAULT_NAMES = ('verbose_name', 'db_table', 'ordering', - 'app_label') + 'app_label', 'string_conversions', 'properties', + 'update_properties') +DISCARD_NAMES = ('abstract',) class Options(object): """ class based on django.db.models.options. We only keep @@ -74,7 +78,7 @@ def contribute_to_class(self, cls, name): # Ignore any private attributes that Django doesn't care about. # NOTE: We can't modify a dictionary's contents while looping # over it, so we loop over the *original* dictionary instead. - if name.startswith('_'): + if name.startswith('_') or name in DISCARD_NAMES: del meta_attrs[name] for attr_name in DEFAULT_NAMES: if attr_name in meta_attrs: @@ -88,7 +92,7 @@ def contribute_to_class(self, cls, name): # Any leftover attributes must be invalid. 
if meta_attrs != {}: - raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(meta_attrs.keys())) + raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(list(meta_attrs.keys()))) else: self.verbose_name_plural = string_concat(self.verbose_name, 's') del self.meta @@ -121,6 +125,8 @@ def __new__(cls, name, bases, attrs): if not attr_meta: meta = getattr(new_class, 'Meta', None) else: + if getattr(attr_meta, 'abstract', False): + return new_class meta = attr_meta if getattr(meta, 'app_label', None) is None: @@ -141,9 +147,8 @@ def add_to_class(cls, name, value): else: setattr(cls, name, value) -class Document(schema.Document): +class Document(six.with_metaclass(DocumentMeta, schema.Document)): """ Document object for django extension """ - __metaclass__ = DocumentMeta get_id = property(lambda self: self['_id']) get_rev = property(lambda self: self['_rev']) @@ -184,7 +189,6 @@ def get_db(cls): dict_to_json = schema.dict_to_json list_to_json = schema.list_to_json value_to_json = schema.value_to_json -value_to_python = schema.value_to_python dict_to_python = schema.dict_to_python list_to_python = schema.list_to_python convert_property = schema.convert_property diff --git a/couchdbkit/ext/django/testrunner.py b/couchdbkit/ext/django/testrunner.py index cd96d79..4a59161 100644 --- a/couchdbkit/ext/django/testrunner.py +++ b/couchdbkit/ext/django/testrunner.py @@ -3,6 +3,8 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import +from __future__ import print_function from django.test.simple import DjangoTestSuiteRunner from django.conf import settings @@ -32,7 +34,7 @@ def get_test_db(self, db): return test_db def setup_databases(self, **kwargs): - print "overridding the couchdbkit database settings to use a test database!" 
+ print("overridding the couchdbkit database settings to use a test database!") # first pass: just implement this as a monkey-patch to the loading module # overriding all the existing couchdb settings @@ -75,9 +77,9 @@ def teardown_databases(self, old_config, **kwargs): try: db.server.delete_db(db.dbname) deleted_databases.append(db.dbname) - print "deleted database %s for %s" % (db.dbname, app_label) + print("deleted database %s for %s" % (db.dbname, app_label)) except ResourceNotFound: - print "database %s not found for %s! it was probably already deleted." % (db.dbname, app_label) + print("database %s not found for %s! it was probably already deleted." % (db.dbname, app_label)) if skipcount: - print "skipped deleting %s app databases that were already deleted" % skipcount + print("skipped deleting %s app databases that were already deleted" % skipcount) return super(CouchDbKitTestSuiteRunner, self).teardown_databases(old_config, **kwargs) diff --git a/couchdbkit/ext/pylons/auth/adapters.py b/couchdbkit/ext/pylons/auth/adapters.py index 7215502..0269340 100644 --- a/couchdbkit/ext/pylons/auth/adapters.py +++ b/couchdbkit/ext/pylons/auth/adapters.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import from repoze.what.adapters import BaseSourceAdapter from repoze.who.interfaces import IAuthenticator from repoze.who.interfaces import IMetadataProvider diff --git a/couchdbkit/ext/pylons/auth/basic.py b/couchdbkit/ext/pylons/auth/basic.py index 66278ce..0fa6dc3 100644 --- a/couchdbkit/ext/pylons/auth/basic.py +++ b/couchdbkit/ext/pylons/auth/basic.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. 
+from __future__ import absolute_import import logging from paste.request import parse_dict_querystring, parse_formvars from paste.httpexceptions import HTTPUnauthorized diff --git a/couchdbkit/ext/pylons/auth/model.py b/couchdbkit/ext/pylons/auth/model.py index 2203e66..2a3c097 100644 --- a/couchdbkit/ext/pylons/auth/model.py +++ b/couchdbkit/ext/pylons/auth/model.py @@ -3,12 +3,14 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import from hashlib import sha256 import os from .... import Document, SchemaListProperty, StringProperty, \ StringListProperty +import six class Permission(Document): name = StringProperty(required=True) @@ -28,7 +30,7 @@ class User(Document): @staticmethod def _hash_password(cleartext): - if isinstance(cleartext, unicode): + if isinstance(cleartext, six.text_type): password_8bit = cleartext.encode('UTF-8') else: password_8bit = cleartext @@ -39,7 +41,7 @@ def _hash_password(cleartext): hash.update(password_8bit + salt.hexdigest()) hashed_password = salt.hexdigest() + hash.hexdigest() - if not isinstance(hashed_password, unicode): + if not isinstance(hashed_password, six.text_type): hashed_password = hashed_password.decode('UTF-8') return hashed_password diff --git a/couchdbkit/ext/pylons/commands.py b/couchdbkit/ext/pylons/commands.py index 925575c..2b46ae4 100644 --- a/couchdbkit/ext/pylons/commands.py +++ b/couchdbkit/ext/pylons/commands.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import import os from paste.deploy import loadapp from paste.script.command import Command diff --git a/couchdbkit/ext/pylons/db.py b/couchdbkit/ext/pylons/db.py index f56c7e2..ef6a67c 100644 --- a/couchdbkit/ext/pylons/db.py +++ b/couchdbkit/ext/pylons/db.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. 
# See the NOTICE for more information. +from __future__ import absolute_import import os.path from ...client import Server diff --git a/couchdbkit/ext/pylons/test.py b/couchdbkit/ext/pylons/test.py index b0785ac..ea93b64 100644 --- a/couchdbkit/ext/pylons/test.py +++ b/couchdbkit/ext/pylons/test.py @@ -5,6 +5,7 @@ from __future__ import with_statement +from __future__ import absolute_import import os import unittest diff --git a/couchdbkit/external.py b/couchdbkit/external.py index 9a83989..97a382a 100644 --- a/couchdbkit/external.py +++ b/couchdbkit/external.py @@ -3,6 +3,7 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. +from __future__ import absolute_import import sys from .utils import json diff --git a/couchdbkit/loaders.py b/couchdbkit/loaders.py index e57355f..cd574a8 100644 --- a/couchdbkit/loaders.py +++ b/couchdbkit/loaders.py @@ -20,7 +20,9 @@ """ from __future__ import with_statement +from __future__ import absolute_import from .designer import document, push, pushapps, pushdocs +import six class BaseDocsLoader(object): """Baseclass for all doc loaders. 
""" @@ -46,13 +48,13 @@ class FileSystemDocsLoader(BaseDocsLoader): """ def __init__(self, designpath, docpath=None): - if isinstance(designpath, basestring): + if isinstance(designpath, six.string_types): self.designpaths = [designpath] else: self.designpaths = designpath docpath = docpath or [] - if isinstance(docpath, basestring): + if isinstance(docpath, six.string_types): docpath = [docpath] self.docpaths = docpath diff --git a/couchdbkit/logging.py b/couchdbkit/logging.py new file mode 100644 index 0000000..a2a6f7f --- /dev/null +++ b/couchdbkit/logging.py @@ -0,0 +1,32 @@ +from __future__ import absolute_import +import logging + +LOG_LEVELS = { + "critical": logging.CRITICAL, + "error": logging.ERROR, + "warning": logging.WARNING, + "info": logging.INFO, + "debug": logging.DEBUG +} + + +logger = logging.getLogger('couchdbkit') +request_logger = logging.getLogger('couchdbkit.request') +error_logger = logging.getLogger('couchdbkit.error') + + +def set_logging(level, handler=None): + """ + Set level of logging, and choose where to display/save logs + (file or standard output). + """ + if not handler: + handler = logging.StreamHandler() + + loglevel = LOG_LEVELS.get(level, logging.INFO) + logger.setLevel(loglevel) + format = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s" + datefmt = r"%Y-%m-%d %H:%M:%S" + + handler.setFormatter(logging.Formatter(format, datefmt)) + logger.addHandler(handler) diff --git a/couchdbkit/resource.py b/couchdbkit/resource.py index c0a6470..8b6c288 100644 --- a/couchdbkit/resource.py +++ b/couchdbkit/resource.py @@ -19,17 +19,21 @@ u'Welcome' """ +from __future__ import absolute_import import base64 import re +from datetime import datetime from restkit import Resource, ClientResponse -from restkit.errors import ResourceError, RequestFailed, RequestError -from restkit.util import url_quote +from restkit.errors import ResourceError, RequestFailed +from restkit.util import url_quote, make_uri from . 
import __version__ from .exceptions import ResourceNotFound, ResourceConflict, \ PreconditionFailed from .utils import json +from .logging import request_logger +import six USER_AGENT = 'couchdbkit/%s' % __version__ @@ -94,6 +98,11 @@ def request(self, method, path=None, payload=None, headers=None, **params): @return: tuple (data, resp), where resp is an `httplib2.Response` object and data a python object (often a dict). """ + # logging information + start_time = datetime.utcnow() + resp = None + error_status = None + has_error = False headers = headers or {} headers.setdefault('Accept', 'application/json') @@ -101,7 +110,7 @@ def request(self, method, path=None, payload=None, headers=None, **params): if payload is not None: #TODO: handle case we want to put in payload json file. - if not hasattr(payload, 'read') and not isinstance(payload, basestring): + if not hasattr(payload, 'read') and not isinstance(payload, six.string_types): payload = json.dumps(payload).encode('utf-8') headers.setdefault('Content-Type', 'application/json') @@ -109,8 +118,7 @@ def request(self, method, path=None, payload=None, headers=None, **params): try: resp = Resource.request(self, method, path=path, payload=payload, headers=headers, **params) - - except ResourceError, e: + except ResourceError as e: msg = getattr(e, 'msg', '') if e.response and msg: if e.response.headers.get('content-type') == 'application/json': @@ -124,6 +132,8 @@ def request(self, method, path=None, payload=None, headers=None, **params): else: error = msg + has_error = True + error_status = e.status_int if e.status_int == 404: raise ResourceNotFound(error, http_code=404, response=e.response) @@ -138,6 +148,23 @@ def request(self, method, path=None, payload=None, headers=None, **params): raise except: raise + finally: + database = _get_db_from_uri(self.uri, path) + end_time = datetime.utcnow() + duration = end_time - start_time + logging_context = dict( + method=method, + path=path, + params=params, + 
start_time=start_time, + end_time=end_time, + status_code=resp.status_int if resp else error_status, + content_length=resp.headers.get('content-length') if resp else None, + has_error=has_error, + duration=duration, + database=database + ) + request_logger.debug('{} to {}/{} took {}'.format(method, database, path, duration), extra=logging_context) return resp @@ -150,7 +177,7 @@ def encode_params(params): value = json.dumps(value) elif value is None: continue - elif not isinstance(value, basestring): + elif not isinstance(value, six.string_types): value = json.dumps(value) _params[name] = value return _params @@ -166,9 +193,17 @@ def escape_docid(docid): re_sp = re.compile('\s') def encode_attachments(attachments): - for k, v in attachments.iteritems(): + for k, v in six.iteritems(attachments): if v.get('stub', False): continue else: v['data'] = re_sp.sub('', base64.b64encode(v['data'])) return attachments + + +def _get_db_from_uri(uri, path): + full_uri = make_uri(uri, path) + try: + return full_uri.split('/')[3] + except IndexError: + return 'unknown' diff --git a/couchdbkit/schema/__init__.py b/couchdbkit/schema/__init__.py index 875f37e..b6fb5e5 100644 --- a/couchdbkit/schema/__init__.py +++ b/couchdbkit/schema/__init__.py @@ -155,9 +155,7 @@ class A(Dcoument): all threads. It's better to use the db object methods if you want to be threadsafe. 
""" - from .properties import ( - ALLOWED_PROPERTY_TYPES, Property, StringProperty, IntegerProperty, @@ -175,12 +173,9 @@ class A(Dcoument): dict_to_json, list_to_json, value_to_json, - MAP_TYPES_PROPERTIES, - value_to_python, dict_to_python, list_to_python, convert_property, - value_to_property, LazyDict, LazyList) @@ -191,7 +186,6 @@ class A(Dcoument): from .base import ( ReservedWordError, - ALLOWED_PROPERTY_TYPES, DocumentSchema, SchemaProperties, DocumentBase, diff --git a/couchdbkit/schema/base.py b/couchdbkit/schema/base.py index fbac71b..84fb60c 100644 --- a/couchdbkit/schema/base.py +++ b/couchdbkit/schema/base.py @@ -6,17 +6,23 @@ """ module that provides a Document object that allows you to map CouchDB document in Python statically, dynamically or both """ +from __future__ import absolute_import +import copy - +import jsonobject +from jsonobject.exceptions import DeleteNotAllowed +from couchdbkit.utils import ProxyDict +from ..exceptions import ResourceNotFound, ReservedWordError from . 
import properties as p -from .properties import value_to_python, \ -convert_property, MAP_TYPES_PROPERTIES, ALLOWED_PROPERTY_TYPES, \ +from .properties import \ +convert_property, \ LazyDict, LazyList from ..exceptions import DuplicatePropertyError, ResourceNotFound, \ ReservedWordError +import six -__all__ = ['ReservedWordError', 'ALLOWED_PROPERTY_TYPES', 'DocumentSchema', +__all__ = ['ReservedWordError', 'DocumentSchema', 'SchemaProperties', 'DocumentBase', 'QueryMixin', 'AttachmentMixin', 'Document', 'StaticDocument', 'valid_id'] @@ -32,382 +38,76 @@ def check_reserved_words(attr_name): locals()) def valid_id(value): - if isinstance(value, basestring) and not value.startswith('_'): + if isinstance(value, six.string_types) and not value.startswith('_'): return value raise TypeError('id "%s" is invalid' % value) -class SchemaProperties(type): - - def __new__(cls, name, bases, attrs): - # init properties - properties = {} - defined = set() - for base in bases: - if hasattr(base, '_properties'): - property_keys = base._properties.keys() - duplicate_properties = defined.intersection(property_keys) - if duplicate_properties: - raise DuplicatePropertyError( - 'Duplicate properties in base class %s already defined: %s' % (base.__name__, list(duplicate_properties))) - defined.update(property_keys) - properties.update(base._properties) - - doc_type = attrs.get('doc_type', False) - if not doc_type: - doc_type = name - else: - del attrs['doc_type'] - - attrs['_doc_type'] = doc_type - - for attr_name, attr in attrs.items(): - # map properties - if isinstance(attr, p.Property): - check_reserved_words(attr_name) - if attr_name in defined: - raise DuplicatePropertyError('Duplicate property: %s' % attr_name) - properties[attr_name] = attr - attr.__property_config__(cls, attr_name) - # python types - elif type(attr) in MAP_TYPES_PROPERTIES and \ - not attr_name.startswith('_') and \ - attr_name not in _NODOC_WORDS: - check_reserved_words(attr_name) - if attr_name in defined: - 
raise DuplicatePropertyError('Duplicate property: %s' % attr_name) - prop = MAP_TYPES_PROPERTIES[type(attr)](default=attr) - properties[attr_name] = prop - prop.__property_config__(cls, attr_name) - attrs[attr_name] = prop - - attrs['_properties'] = properties - return type.__new__(cls, name, bases, attrs) - - -class DocumentSchema(object): - __metaclass__ = SchemaProperties - - _dynamic_properties = None - _allow_dynamic_properties = True - _doc = None - _db = None - _doc_type_attr = 'doc_type' - - def __init__(self, _d=None, **properties): - self._dynamic_properties = {} - self._doc = {} - - if _d is not None: - if not isinstance(_d, dict): - raise TypeError('_d should be a dict') - properties.update(_d) - - doc_type = getattr(self, '_doc_type', self.__class__.__name__) - self._doc[self._doc_type_attr] = doc_type - - for prop in self._properties.values(): - if prop.name in properties: - value = properties.pop(prop.name) - if value is None: - value = prop.default_value() - else: - value = prop.default_value() - prop.__property_init__(self, value) - self.__dict__[prop.name] = value - - _dynamic_properties = properties.copy() - for attr_name, value in _dynamic_properties.iteritems(): - if attr_name not in self._properties \ - and value is not None: - if isinstance(value, p.Property): - value.__property_config__(self, attr_name) - value.__property_init__(self, value.default_value()) - elif isinstance(value, DocumentSchema): - from couchdbkit.schema import SchemaProperty - value = SchemaProperty(value) - value.__property_config__(self, attr_name) - value.__property_init__(self, value.default_value()) - - - setattr(self, attr_name, value) - # remove the kwargs to speed stuff - del properties[attr_name] - - def dynamic_properties(self): - """ get dict of dynamic properties """ - if self._dynamic_properties is None: - return {} - return self._dynamic_properties.copy() - - @classmethod - def properties(cls): - """ get dict of defined properties """ - return 
cls._properties.copy() - - def all_properties(self): - """ get all properties. - Generally we just need to use keys""" - all_properties = self._properties.copy() - all_properties.update(self.dynamic_properties()) - return all_properties - - def to_json(self): - if self._doc.get(self._doc_type_attr) is None: - doc_type = getattr(self, '_doc_type', self.__class__.__name__) - self._doc[self._doc_type_attr] = doc_type - return self._doc - - #TODO: add a way to maintain custom dynamic properties - def __setattr__(self, key, value): - """ - override __setattr__ . If value is in dir, we just use setattr. - If value is not known (dynamic) we test if type and name of value - is supported (in ALLOWED_PROPERTY_TYPES, Property instance and not - start with '_') a,d add it to `_dynamic_properties` dict. If value is - a list or a dict we use LazyList and LazyDict to maintain in the value. - """ - if key == "_id" and valid_id(value): - self._doc['_id'] = value - elif key == "_deleted": - self._doc["_deleted"] = value - elif key == "_attachments": - if key not in self._doc or not value: - self._doc[key] = {} - elif not isinstance(self._doc[key], dict): - self._doc[key] = {} - value = LazyDict(self._doc[key], init_vals=value) +class SchemaProperties(jsonobject.JsonObjectMeta): + def __new__(mcs, name, bases, dct): + if '_doc_type_attr' in dct: + doc_type_attr = dct['_doc_type_attr'] else: - check_reserved_words(key) - if not hasattr( self, key ) and not self._allow_dynamic_properties: - raise AttributeError("%s is not defined in schema (not a valid property)" % key) - - elif not key.startswith('_') and \ - key not in self.properties() and \ - key not in dir(self): - if type(value) not in ALLOWED_PROPERTY_TYPES and \ - not isinstance(value, (p.Property,)): - raise TypeError("Document Schema cannot accept values of type '%s'." 
% - type(value).__name__) - - if self._dynamic_properties is None: - self._dynamic_properties = {} - - if isinstance(value, dict): - if key not in self._doc or not value: - self._doc[key] = {} - elif not isinstance(self._doc[key], dict): - self._doc[key] = {} - value = LazyDict(self._doc[key], init_vals=value) - elif isinstance(value, list): - if key not in self._doc or not value: - self._doc[key] = [] - elif not isinstance(self._doc[key], list): - self._doc[key] = [] - value = LazyList(self._doc[key], init_vals=value) - - self._dynamic_properties[key] = value - - if not isinstance(value, (p.Property,)) and \ - not isinstance(value, dict) and \ - not isinstance(value, list): - if callable(value): - value = value() - self._doc[key] = convert_property(value) - else: - object.__setattr__(self, key, value) - - def __delattr__(self, key): - """ delete property - """ - if key in self._doc: - del self._doc[key] - - if self._dynamic_properties and key in self._dynamic_properties: - del self._dynamic_properties[key] + doc_type_attr = ( + super(SchemaProperties, mcs).__new__(mcs, '', bases, {}) + )._doc_type_attr + if isinstance(dct.get(doc_type_attr), six.string_types): + doc_type = dct.pop(doc_type_attr) else: - object.__delattr__(self, key) - - def __getattr__(self, key): - """ get property value - """ - if self._dynamic_properties and key in self._dynamic_properties: - return self._dynamic_properties[key] - elif key in ('_id', '_rev', '_attachments', 'doc_type'): - return self._doc.get(key) - try: - return self.__dict__[key] - except KeyError, e: - raise AttributeError(e) - - def __getitem__(self, key): - """ get property value - """ - try: - attr = getattr(self, key) - if callable(attr): - raise AttributeError("existing instance method") - return attr - except AttributeError: - if key in self._doc: - return self._doc[key] - raise - - def __setitem__(self, key, value): - """ add a property - """ - setattr(self, key, value) - - - def __delitem__(self, key): - """ delete a 
property - """ - try: - delattr(self, key) - except AttributeError, e: - raise KeyError, e - + doc_type = name + dct[doc_type_attr] = jsonobject.StringProperty( + default=lambda self: self._doc_type + ) + cls = super(SchemaProperties, mcs).__new__(mcs, name, bases, dct) + cls._doc_type = doc_type + return cls - def __contains__(self, key): - """ does object contain this propery ? - @param key: name of property +class DocumentSchema(six.with_metaclass(SchemaProperties, jsonobject.JsonObject)): - @return: True if key exist. - """ - if key in self.all_properties(): - return True - elif key in self._doc: - return True - return False - - def __iter__(self): - """ iter document instance properties - """ - for k in self.all_properties().keys(): - yield k, self[k] - raise StopIteration + _validate_required_lazily = True + _doc_type_attr = 'doc_type' - iteritems = __iter__ + @property + def _doc(self): + return ProxyDict(self, self._obj) - def items(self): - """ return list of items - """ - return [(k, self[k]) for k in self.all_properties().keys()] + @property + def _dynamic_properties(self): + from jsonobject.base import get_dynamic_properties + return get_dynamic_properties(self) + def dynamic_properties(self): + return self._dynamic_properties.copy() - def __len__(self): - """ get number of properties - """ - return len(self._doc or ()) + def __delitem__(self, key): + try: + super(DocumentSchema, self).__delitem__(key) + except DeleteNotAllowed: + self[key] = None - def __getstate__(self): - """ let pickle play with us """ - obj_dict = self.__dict__.copy() - return obj_dict + def __delattr__(self, name): + try: + super(DocumentSchema, self).__delattr__(name) + except DeleteNotAllowed: + setattr(self, name, None) - @classmethod - def wrap(cls, data): - """ wrap `data` dict in object properties """ - instance = cls() - instance._doc = data - for prop in instance._properties.values(): - if prop.name in data: - value = data[prop.name] - if value is not None: - value = 
prop.to_python(value) - else: - value = prop.default_value() - else: - value = prop.default_value() - prop.__property_init__(instance, value) - - if cls._allow_dynamic_properties: - for attr_name, value in data.iteritems(): - if attr_name in instance.properties(): - continue - if value is None: - continue - elif attr_name.startswith('_'): - continue - elif attr_name == cls._doc_type_attr: - continue - else: - value = value_to_python(value) - setattr(instance, attr_name, value) - return instance - from_json = wrap - - def validate(self, required=True): - """ validate a document """ - for attr_name, value in self._doc.items(): - if attr_name in self._properties: - self._properties[attr_name].validate( - getattr(self, attr_name), required=required) - return True - - def clone(self, **kwargs): - """ clone a document """ - kwargs.update(self._dynamic_properties) - obj = self.__class__(**kwargs) - obj._doc = self._doc - return obj + def __getitem__(self, item): + try: + return super(DocumentSchema, self).__getitem__(item) + except KeyError as e: + raise AttributeError(e) - @classmethod - def build(cls, **kwargs): - """ build a new instance from this document object. """ - properties = {} - for attr_name, attr in kwargs.items(): - if isinstance(attr, (p.Property,)): - properties[attr_name] = attr - attr.__property_config__(cls, attr_name) - elif type(attr) in MAP_TYPES_PROPERTIES and \ - not attr_name.startswith('_') and \ - attr_name not in _NODOC_WORDS: - check_reserved_words(attr_name) - - prop = MAP_TYPES_PROPERTIES[type(attr)](default=attr) - properties[attr_name] = prop - prop.__property_config__(cls, attr_name) - properties[attr_name] = prop - return type('AnonymousSchema', (cls,), properties) class DocumentBase(DocumentSchema): - """ Base Document object that map a CouchDB Document. - It allow you to statically map a document by - providing fields like you do with any ORM or - dynamically. 
Ie unknown fields are loaded as - object property that you can edit, datetime in - iso3339 format are automatically translated in - python types (date, time & datetime) and decimal too. - - Example of documentass - .. code-block:: python + _id = jsonobject.StringProperty(exclude_if_none=True) + _rev = jsonobject.StringProperty(exclude_if_none=True) + _attachments = jsonobject.DictProperty(exclude_if_none=True, default=None) - from couchdbkit.schema import * - class MyDocument(Document): - mystring = StringProperty() - myotherstring = unicode() # just use python types - - - Document fields can be accessed as property or - key of dict. These are similar : ``value = instance.key or value = instance['key'].`` - - To delete a property simply do ``del instance[key'] or delattr(instance, key)`` - """ _db = None - def __init__(self, _d=None, **kwargs): - _d = _d or {} - - docid = kwargs.pop('_id', _d.pop("_id", "")) - docrev = kwargs.pop('_rev', _d.pop("_rev", "")) - - super(DocumentBase, self).__init__(_d, **kwargs) - - if docid: self._doc['_id'] = valid_id(docid) - if docrev: self._doc['_rev'] = docrev + # The rest of this class is mostly copied from couchdbkit 0.5.7 @classmethod def set_db(cls, db): @@ -440,25 +140,35 @@ def save(self, **params): store = save @classmethod - def save_docs(cls, docs, use_uuids=True, all_or_nothing=False): + def save_docs(cls, docs, use_uuids=True): """ Save multiple documents in database. @params docs: list of couchdbkit.schema.Document instance @param use_uuids: add _id in doc who don't have it already set. - @param all_or_nothing: In the case of a power failure, when the database - restarts either all the changes will have been saved or none of them. - However, it does not do conflict checking, so the documents will - be committed even if this creates conflicts. 
- """ db = cls.get_db() - docs_to_save= [doc for doc in docs if doc._doc_type == cls._doc_type] - if not len(docs_to_save) == len(docs): + if any(doc._doc_type != cls._doc_type for doc in docs): raise ValueError("one of your documents does not have the correct type") - db.bulk_save(docs_to_save, use_uuids=use_uuids, all_or_nothing=all_or_nothing) + db.bulk_save(docs, use_uuids=use_uuids) bulk_save = save_docs + @classmethod + def delete_docs(cls, docs, empty_on_delete=False): + """ Bulk delete documents in a database + + @params docs: list of couchdbkit.schema.Document instance + @param empty_on_delete: default is False if you want to make + sure the doc is emptied and will not be stored as is in Apache + CouchDB. + """ + db = cls.get_db() + if any(doc._doc_type != cls._doc_type for doc in docs): + raise ValueError("one of your documents does not have the correct type") + db.bulk_delete(docs, empty_on_delete=empty_on_delete) + + bulk_delete = delete_docs + @classmethod def get(cls, docid, rev=None, db=None, dynamic_properties=True): """ get document with `docid` @@ -591,36 +301,16 @@ def view(cls, view_name, wrapper=None, dynamic_properties=None, dynamic_properties=dynamic_properties, wrap_doc=wrap_doc, wrapper=wrapper, schema=classes, **params) - @classmethod - def temp_view(cls, design, wrapper=None, dynamic_properties=None, - wrap_doc=True, classes=None, **params): - """ Slow view. Like in view method, - results are automatically wrapped to - Document object. - - @params design: design object, See `simplecouchd.client.Database` - @dynamic_properties: do we handle properties which aren't in - the schema ? - @wrap_doc: If True, if a doc is present in the row it will be - used for wrapping. Default is True. - @params params: params of view - - @return: Like view, return a :class:`simplecouchdb.core.ViewResults` - instance. All results are wrapped to current document instance. 
- """ - db = cls.get_db() - return db.temp_view(design, - dynamic_properties=dynamic_properties, wrap_doc=wrap_doc, - wrapper=wrapper, schema=classes or cls, **params) class Document(DocumentBase, QueryMixin, AttachmentMixin): """ Full featured document object implementing the following : - :class:`QueryMixin` for view & temp_view that wrap results to this object + :class:`QueryMixin` for view that wrap results to this object :class `AttachmentMixin` for attachments function """ + class StaticDocument(Document): """ Shorthand for a document that disallow dynamic properties. diff --git a/couchdbkit/schema/properties.py b/couchdbkit/schema/properties.py index 598a105..de0d8f1 100644 --- a/couchdbkit/schema/properties.py +++ b/couchdbkit/schema/properties.py @@ -2,13 +2,11 @@ # # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. - -""" properties used by Document object """ - -import decimal -import datetime -import re -import time +from __future__ import absolute_import +import functools +from jsonobject.properties import * +from jsonobject.base import DefaultProperty +import six try: from collections import MutableSet, Iterable @@ -20,1056 +18,40 @@ def is_iterable(c): except ImportError: support_setproperty = False -from couchdbkit.exceptions import BadValueError - -__all__ = ['ALLOWED_PROPERTY_TYPES', 'Property', 'StringProperty', - 'IntegerProperty', 'DecimalProperty', 'BooleanProperty', - 'FloatProperty', 'DateTimeProperty', 'DateProperty', - 'TimeProperty', 'DictProperty', 'StringDictProperty', - 'ListProperty', 'StringListProperty', - 'dict_to_json', 'list_to_json', - 'value_to_json', 'MAP_TYPES_PROPERTIES', 'value_to_python', - 'dict_to_python', 'list_to_python', 'convert_property', - 'value_to_property', 'LazyDict', 'LazyList'] - -if support_setproperty: - __all__ += ['SetProperty', 'LazySet'] - -ALLOWED_PROPERTY_TYPES = set([ - basestring, - str, - unicode, - bool, - int, - long, - float, - 
datetime.datetime, - datetime.date, - datetime.time, - decimal.Decimal, - dict, - list, - set, - type(None) -]) - -re_date = re.compile('^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])$') -re_time = re.compile('^([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?$') -re_datetime = re.compile('^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])(\D?([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?([zZ]|([\+-])([01]\d|2[0-3])\D?([0-5]\d)?)?)?$') -re_decimal = re.compile('^(\d+)\.(\d+)$') - -class Property(object): - """ Property base which all other properties - inherit.""" - creation_counter = 0 - - def __init__(self, verbose_name=None, name=None, - default=None, required=False, validators=None, - choices=None): - """ Default constructor for a property. - - :param verbose_name: str, verbose name of field, could - be use for description - :param name: str, name of field - :param default: default value - :param required: True if field is required, default is False - :param validators: list of callable or callable, field validators - function that are executed when document is saved. - """ - self.verbose_name = verbose_name - self.name = name - self.default = default - self.required = required - self.validators = validators - self.choices = choices - self.creation_counter = Property.creation_counter - Property.creation_counter += 1 - - def __property_config__(self, document_class, property_name): - self.document_class = document_class - if self.name is None: - self.name = property_name - - def __property_init__(self, document_instance, value): - """ method used to set value of the property when - we create the document. Don't check required. 
""" - if value is not None: - value = self.to_json(self.validate(value, required=False)) - document_instance._doc[self.name] = value - - def __get__(self, document_instance, document_class): - if document_instance is None: - return self - - value = document_instance._doc.get(self.name) - if value is not None: - value = self._to_python(value) - - return value - - def __set__(self, document_instance, value): - value = self.validate(value, required=False) - document_instance._doc[self.name] = self._to_json(value) - - def __delete__(self, document_instance): - pass - - def default_value(self): - """ return default value """ - - default = self.default - if callable(default): - default = default() - return default - - def validate(self, value, required=True): - """ validate value """ - if required and self.empty(value): - if self.required: - raise BadValueError("Property %s is required." % self.name) - else: - if self.choices and value is not None: - if isinstance(self.choices, list): choice_list = self.choices - if isinstance(self.choices, dict): choice_list = self.choices.keys() - if isinstance(self.choices, tuple): choice_list = [key for (key, name) in self.choices] - - if value not in choice_list: - raise BadValueError('Property %s is %r; must be one of %r' % ( - self.name, value, choice_list)) - if self.validators: - if isinstance(self.validators, (list, tuple,)): - for validator in self.validators: - if callable(validator): - validator(value) - elif callable(self.validators): - self.validators(value) - return value - - def empty(self, value): - """ test if value is empty """ - return (not value and value != 0) or value is None - - def _to_python(self, value): - if value == None: - return value - return self.to_python(value) - - def _to_json(self, value): - if value == None: - return value - return self.to_json(value) - - def to_python(self, value): - """ convert to python type """ - return unicode(value) - - def to_json(self, value): - """ convert to json, 
Converted value is saved in couchdb. """ - return self.to_python(value) - - data_type = None - -class StringProperty(Property): - """ string property str or unicode property - - *Value type*: unicode - """ - - to_python = unicode - - def validate(self, value, required=True): - value = super(StringProperty, self).validate(value, - required=required) - - if value is None: - return value - - if not isinstance(value, basestring): - raise BadValueError( - 'Property %s must be unicode or str instance, not a %s' % (self.name, type(value).__name__)) - return value - - data_type = unicode - -class IntegerProperty(Property): - """ Integer property. map to int - - *Value type*: int - """ - to_python = int - - def empty(self, value): - return value is None - - def validate(self, value, required=True): - value = super(IntegerProperty, self).validate(value, - required=required) - - if value is None: - return value - - if value is not None and not isinstance(value, (int, long,)): - raise BadValueError( - 'Property %s must be %s or long instance, not a %s' - % (self.name, type(self.data_type).__name__, - type(value).__name__)) - - return value - - data_type = int -LongProperty = IntegerProperty - -class FloatProperty(Property): - """ Float property, map to python float - - *Value type*: float - """ - to_python = float - data_type = float - - def validate(self, value, required=True): - value = super(FloatProperty, self).validate(value, - required=required) - - if value is None: - return value - - if not isinstance(value, float): - raise BadValueError( - 'Property %s must be float instance, not a %s' - % (self.name, type(value).__name__)) - - return value -Number = FloatProperty - -class BooleanProperty(Property): - """ Boolean property, map to python bool - - *ValueType*: bool - """ - to_python = bool - data_type = bool - - def validate(self, value, required=True): - value = super(BooleanProperty, self).validate(value, - required=required) - - if value is None: - return value - - 
if value is not None and not isinstance(value, bool): - raise BadValueError( - 'Property %s must be bool instance, not a %s' - % (self.name, type(value).__name__)) - - return value - - def empty(self, value): - """test if boolean is empty""" - return value is None - -class DecimalProperty(Property): - """ Decimal property, map to Decimal python object +StringListProperty = functools.partial(ListProperty, six.text_type) +StringDictProperty = functools.partial(DictProperty, six.text_type) - *ValueType*: decimal.Decimal - """ - data_type = decimal.Decimal - def to_python(self, value): - return decimal.Decimal(value) - - def to_json(self, value): - return unicode(value) - -class DateTimeProperty(Property): - """DateTime property. It convert iso3339 string - to python and vice-versa. Map to datetime.datetime - object. - - *ValueType*: datetime.datetime - """ - - def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False, - **kwds): - super(DateTimeProperty, self).__init__(verbose_name, **kwds) - self.auto_now = auto_now - self.auto_now_add = auto_now_add - - def validate(self, value, required=True): - value = super(DateTimeProperty, self).validate(value, required=required) - - if value is None: - return value - - if value and not isinstance(value, self.data_type): - raise BadValueError('Property %s must be a %s, current is %s' % - (self.name, self.data_type.__name__, type(value).__name__)) - return value - - def default_value(self): - if self.auto_now or self.auto_now_add: - return self.now() - return Property.default_value(self) - - def to_python(self, value): - if isinstance(value, basestring): - try: - value = value.split('.', 1)[0] # strip out microseconds - value = value[0:19] # remove timezone - value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S') - except ValueError, e: - raise ValueError('Invalid ISO date/time %r [%s]' % - (value, str(e))) - return value - - def to_json(self, value): - if self.auto_now: - value = self.now() - - if value 
is None: - return value - return value.replace(microsecond=0).isoformat() + 'Z' - - data_type = datetime.datetime - - @staticmethod - def now(): - return datetime.datetime.utcnow() - -class DateProperty(DateTimeProperty): - """ Date property, like DateTime property but only - for Date. Map to datetime.date object - - *ValueType*: datetime.date - """ - data_type = datetime.date - - @staticmethod - def now(): - return datetime.datetime.now().date() - - def to_python(self, value): - if isinstance(value, basestring): - try: - value = datetime.date(*time.strptime(value, '%Y-%m-%d')[:3]) - except ValueError, e: - raise ValueError('Invalid ISO date %r [%s]' % (value, - str(e))) - return value - - def to_json(self, value): - if value is None: - return value - return value.isoformat() - -class TimeProperty(DateTimeProperty): - """ Date property, like DateTime property but only - for time. Map to datetime.time object - - *ValueType*: datetime.time - """ - - data_type = datetime.time - - @staticmethod - def now(self): - return datetime.datetime.now().time() - - def to_python(self, value): - if isinstance(value, basestring): - try: - value = value.split('.', 1)[0] # strip out microseconds - value = datetime.time(*time.strptime(value, '%H:%M:%S')[3:6]) - except ValueError, e: - raise ValueError('Invalid ISO time %r [%s]' % (value, - str(e))) - return value - - def to_json(self, value): - if value is None: - return value - return value.replace(microsecond=0).isoformat() - - -class DictProperty(Property): - """ A property that stores a dict of things""" - - def __init__(self, verbose_name=None, default=None, - required=False, **kwds): - """ - :args verbose_name: Optional verbose name. - :args default: Optional default value; if omitted, an empty list is used. - :args**kwds: Optional additional keyword arguments, passed to base class. - - Note that the only permissible value for 'required' is True. 
- """ - - if default is None: - default = {} - - Property.__init__(self, verbose_name, default=default, - required=required, **kwds) - - data_type = dict - - def validate(self, value, required=True): - value = super(DictProperty, self).validate(value, required=required) - if value and value is not None: - if not isinstance(value, dict): - raise BadValueError('Property %s must be a dict' % self.name) - value = self.validate_dict_contents(value) - return value - - def validate_dict_contents(self, value): - try: - value = validate_dict_content(value) - except BadValueError: - raise BadValueError( - 'Items of %s dict must all be in %s' % - (self.name, ALLOWED_PROPERTY_TYPES)) - return value - - def default_value(self): - """Default value for list. - - Because the property supplied to 'default' is a static value, - that value must be shallow copied to prevent all fields with - default values from sharing the same instance. - - Returns: - Copy of the default value. - """ - value = super(DictProperty, self).default_value() - if value is None: - value = {} - return dict(value) - - def to_python(self, value): - return LazyDict(value) - - def to_json(self, value): - return value_to_json(value) - - - -class StringDictProperty(DictProperty): - - def to_python(self, value): - return LazyDict(value, item_type=basestring) - - def validate_dict_contents(self, value): +class Property(DefaultProperty): + def wrap(self, obj): try: - value = validate_dict_content(value, basestring) - except BadValueError: - raise BadValueError( - 'Items of %s dict must all be in %s' % - (self.name, basestring)) - return value - + return self.to_python(obj) + except NotImplementedError: + return super(Property, self).wrap(obj) - -class ListProperty(Property): - """A property that stores a list of things. - - """ - def __init__(self, verbose_name=None, default=None, - required=False, item_type=None, **kwds): - """Construct ListProperty. - - - :args verbose_name: Optional verbose name. 
- :args default: Optional default value; if omitted, an empty list is used. - :args**kwds: Optional additional keyword arguments, passed to base class. - - - """ - if default is None: - default = [] - - if item_type is not None and item_type not in ALLOWED_PROPERTY_TYPES: - raise ValueError('item_type %s not in %s' % (item_type, ALLOWED_PROPERTY_TYPES)) - self.item_type = item_type - - Property.__init__(self, verbose_name, default=default, - required=required, **kwds) - - data_type = list - - def validate(self, value, required=True): - value = super(ListProperty, self).validate(value, required=required) - if value and value is not None: - if not isinstance(value, list): - raise BadValueError('Property %s must be a list' % self.name) - value = self.validate_list_contents(value) - return value - - def validate_list_contents(self, value): - value = validate_list_content(value, item_type=self.item_type) + def unwrap(self, obj): try: - value = validate_list_content(value, item_type=self.item_type) - except BadValueError: - raise BadValueError( - 'Items of %s list must all be in %s' % - (self.name, ALLOWED_PROPERTY_TYPES)) - return value - - def default_value(self): - """Default value for list. - - Because the property supplied to 'default' is a static value, - that value must be shallow copied to prevent all fields with - default values from sharing the same instance. - - Returns: - Copy of the default value. 
- """ - value = super(ListProperty, self).default_value() - if value is None: - value = [] - return list(value) + return obj, self.to_json(obj) + except NotImplementedError: + return super(Property, self).unwrap(obj) def to_python(self, value): - return LazyList(value, item_type=self.item_type) + raise NotImplementedError() def to_json(self, value): - return value_to_json(value, item_type=self.item_type) - - -class StringListProperty(ListProperty): - """ shorthand for list that should containe only unicode""" - - def __init__(self, verbose_name=None, default=None, - required=False, **kwds): - super(StringListProperty, self).__init__(verbose_name=verbose_name, - default=default, required=required, item_type=basestring, **kwds) - - - - - -# dict proxy - -class LazyDict(dict): - """ object to make sure we keep updated of dict - in _doc. We just override a dict and maintain change in - doc reference (doc[keyt] obviously). - - if init_vals is specified, doc is overwritten - with the dict given. Otherwise, the values already in - doc are used. 
- """ - - def __init__(self, doc, item_type=None, init_vals=None): - dict.__init__(self) - self.item_type = item_type - - self.doc = doc - if init_vals is None: - self._wrap() - else: - for key, value in init_vals.items(): - self[key] = value - - def _wrap(self): - for key, json_value in self.doc.items(): - if isinstance(json_value, dict): - value = LazyDict(json_value, item_type=self.item_type) - elif isinstance(json_value, list): - value = LazyList(json_value, item_type=self.item_type) - else: - value = value_to_python(json_value, self.item_type) - dict.__setitem__(self, key, value) - - def __setitem__(self, key, value): - if isinstance(value, dict): - self.doc[key] = {} - value = LazyDict(self.doc[key], item_type=self.item_type, init_vals=value) - elif isinstance(value, list): - self.doc[key] = [] - value = LazyList(self.doc[key], item_type=self.item_type, init_vals=value) - else: - self.doc.update({key: value_to_json(value, item_type=self.item_type) }) - super(LazyDict, self).__setitem__(key, value) - - def __delitem__(self, key): - del self.doc[key] - super(LazyDict, self).__delitem__(key) - - def pop(self, key, *args): - default = len(args) == 1 - if default: - self.doc.pop(key, args[-1]) - return super(LazyDict, self).pop(key, args[-1]) - self.doc.pop(key) - return super(LazyDict, self).pop(key) - - def setdefault(self, key, default): - if key in self: - return self[key] - self.doc.setdefault(key, value_to_json(default, item_type=self.item_type)) - super(LazyDict, self).setdefault(key, default) - return default - - def update(self, value): - for k, v in value.items(): - self[k] = v - - def popitem(self, value): - new_value = super(LazyDict, self).popitem(value) - self.doc.popitem(value_to_json(value, item_type=self.item_type)) - return new_value - - def clear(self): - self.doc.clear() - super(LazyDict, self).clear() - - -class LazyList(list): - """ object to make sure we keep update of list - in _doc. 
We just override a list and maintain change in - doc reference (doc[index] obviously). - - if init_vals is specified, doc is overwritten - with the list given. Otherwise, the values already in - doc are used. - """ - - def __init__(self, doc, item_type=None, init_vals=None): - list.__init__(self) - - self.item_type = item_type - self.doc = doc - if init_vals is None: - # just wrap the current values - self._wrap() - else: - # initialize this list and the underlying list - # with the values given. - del self.doc[:] - for item in init_vals: - self.append(item) - - def _wrap(self): - for json_value in self.doc: - if isinstance(json_value, dict): - value = LazyDict(json_value, item_type=self.item_type) - elif isinstance(json_value, list): - value = LazyList(json_value, item_type=self.item_type) - else: - value = value_to_python(json_value, self.item_type) - list.append(self, value) - - def __delitem__(self, index): - del self.doc[index] - list.__delitem__(self, index) - - def __setitem__(self, index, value): - if isinstance(value, dict): - self.doc[index] = {} - value = LazyDict(self.doc[index], item_type=self.item_type, init_vals=value) - elif isinstance(value, list): - self.doc[index] = [] - value = LazyList(self.doc[index], item_type=self.item_type, init_vals=value) - else: - self.doc[index] = value_to_json(value, item_type=self.item_type) - list.__setitem__(self, index, value) - - - def __delslice__(self, i, j): - del self.doc[i:j] - list.__delslice__(self, i, j) - - def __getslice__(self, i, j): - return LazyList(self.doc[i:j], self.item_type) - - def __setslice__(self, i, j, seq): - self.doc[i:j] = (value_to_json(v, item_type=self.item_type) for v in seq) - list.__setslice__(self, i, j, seq) - - def __contains__(self, value): - jvalue = value_to_json(value) - for m in self.doc: - if m == jvalue: return True - return False - - def append(self, *args, **kwargs): - if args: - assert len(args) == 1 - value = args[0] - else: - value = kwargs - - index = len(self) - if 
isinstance(value, dict): - self.doc.append({}) - value = LazyDict(self.doc[index], item_type=self.item_type, init_vals=value) - elif isinstance(value, list): - self.doc.append([]) - value = LazyList(self.doc[index], item_type=self.item_type, init_vals=value) - else: - self.doc.append(value_to_json(value, item_type=self.item_type)) - super(LazyList, self).append(value) - - def extend(self, x): - self.doc.extend( - [value_to_json(v, item_type=self.item_type) for v in x]) - super(LazyList, self).extend(x) - - def index(self, x, *args): - x = value_to_json(x, item_type=self.item_type) - return self.doc.index(x) - - def insert(self, i, x): - self.__setslice__(i, i, [x]) - - def pop(self, i=-1): - del self.doc[i] - v = super(LazyList, self).pop(i) - return value_to_python(v, item_type=self.item_type) - - def remove(self, x): - del self[self.index(x)] - - def sort(self, cmp=None, key=None, reverse=False): - self.doc.sort(cmp, key, reverse) - list.sort(self, cmp, key, reverse) - - def reverse(self): - self.doc.reverse() - list.reverse(self) - -if support_setproperty: - class SetProperty(Property): - """A property that stores a Python set as a list of unique - elements. - - Note that Python set operations like union that return a set - object do not alter list that will be stored with the next save, - while operations like update that change a set object in-place do - keep the list in sync. - """ - def __init__(self, verbose_name=None, default=None, required=None, - item_type=None, **kwds): - """Construct SetProperty. - - :args verbose_name: Optional verbose name. - - :args default: Optional default value; if omitted, an empty - set is used. - - :args required: True if field is required, default is False. - - :args item_type: Optional data type of items that set - contains. Used to assist with JSON - serialization/deserialization when data is - stored/retireved. - - :args **kwds: Optional additional keyword arguments, passed to - base class. 
- """ - if default is None: - default = set() - if item_type is not None and item_type not in ALLOWED_PROPERTY_TYPES: - raise ValueError('item_type %s not in %s' - % (item_type, ALLOWED_PROPERTY_TYPES)) - self.item_type = item_type - super(SetProperty, self).__init__( - verbose_name=verbose_name, default=default, required=required, - **kwds) - - data_type = set - - def validate(self, value, required=True): - value = super(SetProperty, self).validate(value, required=required) - if value and value is not None: - if not isinstance(value, MutableSet): - raise BadValueError('Property %s must be a set' % self.name) - value = self.validate_set_contents(value) - return value - - def validate_set_contents(self, value): - try: - value = validate_set_content(value, item_type=self.item_type) - except BadValueError: - raise BadValueError( - 'Items of %s set must all be in %s' % - (self.name, ALLOWED_PROPERTY_TYPES)) - return value - - def default_value(self): - """Return default value for set. - - Because the property supplied to 'default' is a static value, - that value must be shallow copied to prevent all fields with - default values from sharing the same instance. - - Returns: - Copy of the default value. - """ - value = super(SetProperty, self).default_value() - if value is None: - return set() - return value.copy() - - def to_python(self, value): - return LazySet(value, item_type=self.item_type) - - def to_json(self, value): - return value_to_json(value, item_type=self.item_type) - - - class LazySet(MutableSet): - """Object to make sure that we keep set and _doc synchronized. - - We sub-class MutableSet and maintain changes in doc. - - Note that methods like union that return a set object do not - alter _doc, while methods like update that change a set object - in-place do keep _doc in sync. 
- """ - def _map_named_operation(opname): - fn = getattr(MutableSet, opname) - if hasattr(fn, 'im_func'): - fn = fn.im_func - def method(self, other, fn=fn): - if not isinstance(other, MutableSet): - other = self._from_iterable(other) - return fn(self, other) - return method - - issubset = _map_named_operation('__le__') - issuperset = _map_named_operation('__ge__') - symmetric_difference = _map_named_operation('__xor__') - - def __init__(self, doc, item_type=None): - self.item_type = item_type - self.doc = doc - self.elements = set(value_to_python(value, self.item_type) - for value in self.doc) - - def __repr__(self): - return '%s(%r)' % (type(self).__name__, list(self)) - - @classmethod - def _from_iterable(cls, it): - return cls(it) - - def __iand__(self, iterator): - for value in (self.elements - iterator): - self.elements.discard(value) - return self - - def __iter__(self): - return iter(element for element in self.elements) - - def __len__(self): - return len(self.elements) - - def __contains__(self, item): - return item in self.elements - - def __xor__(self, other): - if not isinstance(other, MutableSet): - if not is_iterable(other): - return NotImplemented - other = self._from_iterable(other) - return (self.elements - other) | (other - self.elements) - - def __gt__(self, other): - if not isinstance(other, MutableSet): - return NotImplemented - return other < self.elements - - def __ge__(self, other): - if not isinstance(other, MutableSet): - return NotImplemented - return other <= self.elements - - def __ne__(self, other): - return not (self.elements == other) - - def add(self, value): - self.elements.add(value) - if value not in self.doc: - self.doc.append(value_to_json(value, item_type=self.item_type)) - - def copy(self): - return self.elements.copy() - - def difference(self, other, *args): - return self.elements.difference(other, *args) - - def difference_update(self, other, *args): - for value in other: - self.discard(value) - for arg in args: - 
self.difference_update(arg) - - def discard(self, value): - self.elements.discard(value) - try: - self.doc.remove(value) - except ValueError: - pass - - def intersection(self, other, *args): - return self.elements.intersection(other, *args) - - def intersection_update(self, other, *args): - if not isinstance(other, MutableSet): - other = set(other) - for value in self.elements - other: - self.discard(value) - for arg in args: - self.intersection_update(arg) - - def symmetric_difference_update(self, other): - if not isinstance(other, MutableSet): - other = set(other) - for value in other: - if value in self.elements: - self.discard(value) - else: - self.add(value) - - def union(self, other, *args): - return self.elements.union(other, *args) - - def update(self, other, *args): - self.elements.update(other, *args) - for element in self.elements: - if element not in self.doc: - self.doc.append( - value_to_json(element, item_type=self.item_type)) - -# some mapping - -MAP_TYPES_PROPERTIES = { - decimal.Decimal: DecimalProperty, - datetime.datetime: DateTimeProperty, - datetime.date: DateProperty, - datetime.time: TimeProperty, - str: StringProperty, - unicode: StringProperty, - bool: BooleanProperty, - int: IntegerProperty, - long: LongProperty, - float: FloatProperty, - list: ListProperty, - dict: DictProperty -} - -if support_setproperty: - MAP_TYPES_PROPERTIES[set] = SetProperty - -def convert_property(value): - """ convert a value to json from Property._to_json """ - if type(value) in MAP_TYPES_PROPERTIES: - prop = MAP_TYPES_PROPERTIES[type(value)]() - value = prop.to_json(value) - return value - - -def value_to_property(value): - """ Convert value in a Property object """ - if type(value) in MAP_TYPES_PROPERTIES: - prop = MAP_TYPES_PROPERTIES[type(value)]() - return prop - else: - return value - -# utilities functions - -def validate_list_content(value, item_type=None): - """ validate type of values in a list """ - return [validate_content(item, item_type=item_type) 
for item in value] - -def validate_dict_content(value, item_type=None): - """ validate type of values in a dict """ - return dict([(k, validate_content(v, - item_type=item_type)) for k, v in value.iteritems()]) - -def validate_set_content(value, item_type=None): - """ validate type of values in a set """ - return set(validate_content(item, item_type=item_type) for item in value) - -def validate_content(value, item_type=None): - """ validate a value. test if value is in supported types """ - if isinstance(value, list): - value = validate_list_content(value, item_type=item_type) - elif isinstance(value, dict): - value = validate_dict_content(value, item_type=item_type) - elif item_type is not None and not isinstance(value, item_type): - raise BadValueError( - 'Items must all be in %s' % item_type) - elif type(value) not in ALLOWED_PROPERTY_TYPES: - raise BadValueError( - 'Items must all be in %s' % - (ALLOWED_PROPERTY_TYPES)) - return value - -def dict_to_json(value, item_type=None): - """ convert a dict to json """ - return dict([(k, value_to_json(v, item_type=item_type)) for k, v in value.iteritems()]) - -def list_to_json(value, item_type=None): - """ convert a list to json """ - return [value_to_json(item, item_type=item_type) for item in value] - -def value_to_json(value, item_type=None): - """ convert a value to json using appropriate regexp. - For Dates we use ISO 8601. Decimal are converted to string. 
- - """ - if isinstance(value, datetime.datetime) and is_type_ok(item_type, datetime.datetime): - value = value.replace(microsecond=0).isoformat() + 'Z' - elif isinstance(value, datetime.date) and is_type_ok(item_type, datetime.date): - value = value.isoformat() - elif isinstance(value, datetime.time) and is_type_ok(item_type, datetime.time): - value = value.replace(microsecond=0).isoformat() - elif isinstance(value, decimal.Decimal) and is_type_ok(item_type, decimal.Decimal): - value = unicode(value) - elif isinstance(value, (list, MutableSet)): - value = list_to_json(value, item_type) - elif isinstance(value, dict): - value = dict_to_json(value, item_type) - return value - -def is_type_ok(item_type, value_type): - return item_type is None or item_type == value_type + raise NotImplementedError() -def value_to_python(value, item_type=None): - """ convert a json value to python type using regexp. values converted - have been put in json via `value_to_json` . - """ - data_type = None - if isinstance(value, basestring): - if re_date.match(value) and is_type_ok(item_type, datetime.date): - data_type = datetime.date - elif re_time.match(value) and is_type_ok(item_type, datetime.time): - data_type = datetime.time - elif re_datetime.match(value) and is_type_ok(item_type, datetime.datetime): - data_type = datetime.datetime - elif re_decimal.match(value) and is_type_ok(item_type, decimal.Decimal): - data_type = decimal.Decimal - if data_type is not None: - prop = MAP_TYPES_PROPERTIES[data_type]() - try: - #sometimes regex fail so return value - value = prop.to_python(value) - except: - pass - elif isinstance(value, (list, MutableSet)): - value = list_to_python(value, item_type=item_type) - elif isinstance(value, dict): - value = dict_to_python(value, item_type=item_type) - return value +def _not_implemented(*args, **kwargs): + raise NotImplementedError() -def list_to_python(value, item_type=None): - """ convert a list of json values to python list """ - return 
[value_to_python(item, item_type=item_type) for item in value] +dict_to_json = _not_implemented +list_to_json = _not_implemented +value_to_json = _not_implemented +dict_to_python = _not_implemented +list_to_python = _not_implemented +convert_property = _not_implemented -def dict_to_python(value, item_type=None): - """ convert a json object values to python dict """ - return dict([(k, value_to_python(v, item_type=item_type)) for k, v in value.iteritems()]) +LazyDict = JsonDict +LazyList = JsonArray +LazySet = JsonSet diff --git a/couchdbkit/schema/properties_proxy.py b/couchdbkit/schema/properties_proxy.py index 052b965..1847c10 100644 --- a/couchdbkit/schema/properties_proxy.py +++ b/couchdbkit/schema/properties_proxy.py @@ -1,383 +1,6 @@ -# -*- coding: utf-8 - -# -# This file is part of couchdbkit released under the MIT license. -# See the NOTICE for more information. +from __future__ import absolute_import +from jsonobject import ObjectProperty, ListProperty, DictProperty -""" Meta properties """ - -from ..exceptions import BadValueError - -from .base import DocumentSchema -from .properties import Property - -__all__ = ['SchemaProperty', 'SchemaListProperty', 'SchemaDictProperty'] - -class SchemaProperty(Property): - """ Schema property. It allows you add a DocumentSchema instance - a member of a Document object. It returns a - `schemaDocumentSchema` object. - - Exemple : - - >>> from couchdbkit import * - >>> class Blog(DocumentSchema): - ... title = StringProperty() - ... author = StringProperty(default="me") - ... - >>> class Entry(Document): - ... title = StringProperty() - ... body = StringProperty() - ... blog = SchemaProperty(Blog()) - ... 
- >>> test = Entry() - >>> test._doc - {'body': None, 'doc_type': 'Entry', 'title': None, 'blog': {'doc_type': 'Blog', 'author': u'me', 'title': None}} - >>> test.blog.title = "Mon Blog" - >>> test._doc - {'body': None, 'doc_type': 'Entry', 'title': None, 'blog': {'doc_type': 'Blog', 'author': u'me', 'title': u'Mon Blog'}} - >>> test.blog.title - u'Mon Blog' - >>> from couchdbkit import Server - >>> s = Server() - >>> db = s.create_db('couchdbkit_test') - >>> Entry._db = db - >>> test.save() - >>> doc = Entry.objects.get(test.id) - >>> doc.blog.title - u'Mon Blog' - >>> del s['simplecouchdb_test'] - - """ - - def __init__(self, schema, verbose_name=None, name=None, - required=False, validators=None, default=None): - - Property.__init__(self, verbose_name=None, - name=None, required=False, validators=None, default=default) - - use_instance = True - if isinstance(schema, type): - use_instance = False - - elif not isinstance(schema, DocumentSchema): - raise TypeError('schema should be a DocumentSchema instance') - - elif schema.__class__.__name__ == 'DocumentSchema': - use_instance = False - properties = schema._dynamic_properties.copy() - schema = DocumentSchema.build(**properties) - - self._use_instance = use_instance - self._schema = schema - - def default_value(self): - if not self._use_instance: - if self.default: - return self.default - return self._schema() - return self._schema.clone() - - def empty(self, value): - if not hasattr(value, '_doc'): - return True - if not value._doc or value._doc is None: - return True - return False - - def validate(self, value, required=True): - value.validate(required=required) - value = super(SchemaProperty, self).validate(value) - - if value is None: - return value - - if not isinstance(value, DocumentSchema): - raise BadValueError( - 'Property %s must be DocumentSchema instance, not a %s' % (self.name, - type(value).__name__)) - return value - - def to_python(self, value): - if not self._use_instance: - schema = self._schema 
- else: - schema = self._schema.__class__ - return schema.wrap(value) - - def to_json(self, value): - if not isinstance(value, DocumentSchema): - if not self._use_instance: - schema = self._schema() - else: - schema = self._schema.clone() - - if not isinstance(value, dict): - raise BadValueError("%s is not a dict" % str(value)) - value = schema(**value) - - return value._doc - -class SchemaListProperty(Property): - """A property that stores a list of things. - - """ - def __init__(self, schema, verbose_name=None, default=None, - required=False, **kwds): - - Property.__init__(self, verbose_name, default=default, - required=required, **kwds) - - use_instance = True - if isinstance(schema, type): - use_instance = False - - elif not isinstance(schema, DocumentSchema): - raise TypeError('schema should be a DocumentSchema instance') - - elif schema.__class__.__name__ == 'DocumentSchema': - use_instance = False - properties = schema._dynamic_properties.copy() - schema = DocumentSchema.build(**properties) - - self._use_instance = use_instance - self._schema = schema - - def validate(self, value, required=True): - value = super(SchemaListProperty, self).validate(value, required=required) - if value and value is not None: - if not isinstance(value, list): - raise BadValueError('Property %s must be a list' % self.name) - value = self.validate_list_schema(value, required=required) - return value - - def validate_list_schema(self, value, required=True): - for v in value: - v.validate(required=required) - return value - - def default_value(self): - return [] - - def to_python(self, value): - return LazySchemaList(value, self._schema, self._use_instance) - - def to_json(self, value): - return [svalue_to_json(v, self._schema, self._use_instance) for v in value] - - -class LazySchemaList(list): - - def __init__(self, doc, schema, use_instance, init_vals=None): - list.__init__(self) - - self.schema = schema - self.use_instance = use_instance - self.doc = doc - if init_vals is None: 
- # just wrap the current values - self._wrap() - else: - # initialize this list and the underlying list - # with the values given. - del self.doc[:] - for item in init_vals: - self.append(item) - - def _wrap(self): - for v in self.doc: - if not self.use_instance: - schema = self.schema() - else: - schema = self.schema.clone() - - value = schema.wrap(v) - list.append(self, value) - - def __delitem__(self, index): - del self.doc[index] - list.__delitem__(self, index) - - def __setitem__(self, index, value): - self.doc[index] = svalue_to_json(value, self.schema, - self.use_instance) - list.__setitem__(self, index, value) - - def __delslice__(self, i, j): - del self.doc[i:j] - super(LazySchemaList, self).__delslice__(i, j) - - def __getslice__(self, i, j): - return LazySchemaList(self.doc[i:j], self.schema, self.use_instance) - - def __setslice__(self, i, j, seq): - self.doc[i:j] = (svalue_to_json(v, self.schema, self.use_instance) - for v in seq) - super(LazySchemaList, self).__setslice__(i, j, seq) - - def __contains__(self, value): - for item in self.doc: - if item == value._doc: - return True - return False - - def append(self, *args, **kwargs): - if args: - assert len(args) == 1 - value = args[0] - else: - value = kwargs - - self.doc.append(svalue_to_json(value, self.schema, - self.use_instance)) - super(LazySchemaList, self).append(value) - - def count(self, value): - return sum(1 for item in self.doc if item == value._doc) - - def extend(self, x): - self.doc.extend([svalue_to_json(item, self.schema, self.use_instance) - for item in x]) - super(LazySchemaList, self).extend(x) - - def index(self, value, *args): - try: - i = max(0, args[0]) - except IndexError: - i = 0 - try: - j = min(len(self.doc), args[1]) - except IndexError: - j = len(self.doc) - if j < 0: - j += len(self.doc) - for idx, item in enumerate(self.doc[i:j]): - if item == value._doc: - return idx + i - else: - raise ValueError('list.index(x): x not in list') - - def insert(self, index, value): - 
self.__setslice__(index, index, [value]) - - def pop(self, index=-1): - del self.doc[index] - return super(LazySchemaList, self).pop(index) - - def remove(self, value): - try: - del self[self.index(value)] - except ValueError: - raise ValueError('list.remove(x): x not in list') - - def reverse(self): - self.doc.reverse() - list.reverse(self) - - def sort(self, cmp=None, key=None, reverse=False): - self.doc.sort(cmp, key, reverse) - list.sort(self, cmp, key, reverse) - - -class SchemaDictProperty(Property): - """A property that stores a dict of things. - - """ - def __init__(self, schema, verbose_name=None, default=None, - required=False, **kwds): - - Property.__init__(self, verbose_name, default=default, - required=required, **kwds) - - use_instance = True - if isinstance(schema, type): - use_instance = False - - elif not isinstance(schema, DocumentSchema): - raise TypeError('schema should be a DocumentSchema instance') - - elif schema.__class__.__name__ == 'DocumentSchema': - use_instance = False - properties = schema._dynamic_properties.copy() - schema = DocumentSchema.build(**properties) - - self._use_instance = use_instance - self._schema = schema - - def validate(self, value, required=True): - value = super(SchemaDictProperty, self).validate(value, required=required) - if value and value is not None: - if not isinstance(value, dict): - raise BadValueError('Property %s must be a dict' % self.name) - value = self.validate_dict_schema(value, required=required) - return value - - def validate_dict_schema(self, value, required=True): - for v in value.values(): - v.validate(required=required) - return value - - def default_value(self): - return {} - - def to_python(self, value): - return LazySchemaDict(value, self._schema, self._use_instance) - - def to_json(self, value): - return dict([(k, svalue_to_json(v, self._schema, self._use_instance)) for k, v in value.items()]) - - -class LazySchemaDict(dict): - - def __init__(self, doc, schema, use_instance, 
init_vals=None): - dict.__init__(self) - - self.schema = schema - self.use_instance = use_instance - self.doc = doc - if init_vals is None: - # just wrap the current values - self._wrap() - else: - # initialize this dict and the underlying dict - # with the values given. - del self.doc[:] - for k, v in init_vals: - self[k] = self._wrap(v) - - def _wrap(self): - for k, v in self.doc.items(): - if not self.use_instance: - schema = self.schema() - else: - schema = self.schema.clone() - - value = schema.wrap(v) - dict.__setitem__(self, k, value) - - def __delitem__(self, index): - index = str(index) - del self.doc[index] - dict.__delitem__(self, index) - - def __getitem__(self, index): - index = str(index) - return dict.__getitem__(self, index) - - def __setitem__(self, index, value): - index = str(index) - self.doc[index] = svalue_to_json(value, self.schema, - self.use_instance) - dict.__setitem__(self, index, value) - - -def svalue_to_json(value, schema, use_instance): - if not isinstance(value, DocumentSchema): - if not isinstance(value, dict): - raise BadValueError("%s is not a dict" % str(value)) - - if not use_instance: - value = schema(**value) - else: - value = schema.clone(**value) - - return value._doc +SchemaProperty = ObjectProperty +SchemaListProperty = ListProperty +SchemaDictProperty = DictProperty diff --git a/couchdbkit/schema/util.py b/couchdbkit/schema/util.py index 2069336..64a31ea 100644 --- a/couchdbkit/schema/util.py +++ b/couchdbkit/schema/util.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import from couchdbkit.exceptions import DocTypeError @@ -23,7 +24,7 @@ def doctype_attr_of(classes): def get_multi_wrapper(classes): - doctype_attr = doctype_attr_of(classes.values()) + doctype_attr = doctype_attr_of(list(classes.values())) def wrap(doc): doc_type = doc.get(doctype_attr) diff --git a/couchdbkit/utils.py b/couchdbkit/utils.py index c90565d..5c25cca 100644 --- a/couchdbkit/utils.py +++ b/couchdbkit/utils.py @@ -10,13 +10,17 @@ """ from 
__future__ import with_statement +from __future__ import absolute_import +from __future__ import print_function import codecs import string from hashlib import md5 import os import re import sys -import urllib +import six.moves.urllib.request, six.moves.urllib.parse, six.moves.urllib.error +import six +from six.moves import range try: @@ -123,15 +127,15 @@ def validate_dbname(name): """ validate dbname """ if name in SPECIAL_DBS: return True - elif not VALID_DB_NAME.match(urllib.unquote(name)): + elif not VALID_DB_NAME.match(six.moves.urllib.parse.unquote(name)): raise ValueError("Invalid db name: '%s'" % name) return True def to_bytestring(s): """ convert to bytestring an unicode """ - if not isinstance(s, basestring): + if not isinstance(s, six.string_types): return s - if isinstance(s, unicode): + if isinstance(s, six.text_type): return s.encode('utf-8') else: return s @@ -195,7 +199,7 @@ def read_json(filename, use_environment=False): """ try: data = read_file(filename, force_read=True) - except IOError, e: + except IOError as e: if e[0] == 2: return {} raise @@ -206,8 +210,26 @@ def read_json(filename, use_environment=False): try: data = json.loads(data) except ValueError: - print >>sys.stderr, "Json is invalid, can't load %s" % filename + print("Json is invalid, can't load %s" % filename, file=sys.stderr) raise return data +import jsonobject.base + + +class ProxyDict(jsonobject.utils.SimpleDict): + def __init__(self, parent, *args, **kwargs): + super(ProxyDict, self).__init__(*args, **kwargs) + self.parent = parent + + def __setitem__(self, key, value): + self.parent.set_raw_value(key, value) + super(ProxyDict, self).__setitem__(key, value) + + def __delitem__(self, key): + del self.parent[key] + super(ProxyDict, self).__delitem__(key) + + def __copy__(self): + return self.copy() diff --git a/couchdbkit/version.py b/couchdbkit/version.py index e7a28bf..2102051 100644 --- a/couchdbkit/version.py +++ b/couchdbkit/version.py @@ -3,5 +3,6 @@ # This file is part 
of couchdbkit released under the MIT license. # See the NOTICE for more information. -version_info = (0, 6, 5) -__version__ = ".".join(map(str, version_info)) +from __future__ import absolute_import +version_info = (0, 9, 2) +__version__ = ".".join([str(vi) for vi in version_info]) diff --git a/couchdbkit/wsgi/__init__.py b/couchdbkit/wsgi/__init__.py deleted file mode 100644 index 21d0b7c..0000000 --- a/couchdbkit/wsgi/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 - -# -# This file is part of couchdbkit released under the MIT license. -# See the NOTICE for more information. - - diff --git a/couchdbkit/wsgi/handler.py b/couchdbkit/wsgi/handler.py deleted file mode 100644 index fafdb1f..0000000 --- a/couchdbkit/wsgi/handler.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 - -# -# This file is part of couchdbkit released under the MIT license. -# See the NOTICE for more information. - -import sys -import StringIO -import traceback -from urllib import unquote - -from restkit.util import url_encode - -from .. 
import __version__ -from ..external import External - -def _normalize_name(name): - return "-".join([w.lower().capitalize() for w in name.split("-")]) - -class WSGIRequest(object): - - SERVER_VERSION = "couchdbkit/%s" % __version__ - - def __init__(self, line): - self.line = line - self.response_status = 200 - self.response_headers = {} - self.start_response_called = False - - def read(self): - headers = self.parse_headers() - - length = headers.get("CONTENT_LENGTH") - if self.line["body"] and self.line["body"] != "undefined": - length = len(self.line["body"]) - body = StringIO.StringIO(self.line["body"]) - - else: - body = StringIO.StringIO() - - # path - script_name, path_info = self.line['path'][:2], self.line['path'][2:] - if path_info: - path_info = "/%s" % "/".join(path_info) - else: - path_info = "" - script_name = "/%s" % "/".join(script_name) - - # build query string - args = [] - query_string = None - for k, v in self.line["query"].items(): - if v is None: - continue - else: - args.append((k,v)) - if args: query_string = url_encode(dict(args)) - - # raw path could be useful - path = "%s%s" % (path_info, query_string) - - # get server address - if ":" in self.line["headers"]["Host"]: - server_address = self.line["headers"]["Host"].split(":") - else: - server_address = (self.line["headers"]["Host"], 80) - - environ = { - "wsgi.url_scheme": 'http', - "wsgi.input": body, - "wsgi.errors": StringIO.StringIO(), - "wsgi.version": (1, 0), - "wsgi.multithread": False, - "wsgi.multiprocess": True, - "wsgi.run_once": False, - "SCRIPT_NAME": script_name, - "SERVER_SOFTWARE": self.SERVER_VERSION, - "COUCHDB_INFO": self.line["info"], - "COUCHDB_REQUEST": self.line, - "REQUEST_METHOD": self.line["verb"].upper(), - "PATH_INFO": unquote(path_info), - "QUERY_STRING": query_string, - "RAW_URI": path, - "CONTENT_TYPE": headers.get('CONTENT-TYPE', ''), - "CONTENT_LENGTH": length, - "REMOTE_ADDR": self.line['peer'], - "REMOTE_PORT": 0, - "SERVER_NAME": server_address[0], - 
"SERVER_PORT": int(server_address[1]), - "SERVER_PROTOCOL": "HTTP/1.1" - } - - for key, value in headers.items(): - key = 'HTTP_' + key.replace('-', '_') - if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): - environ[key] = value - - return environ - - def start_response(self, status, response_headers): - self.response_status = int(status.split(" ")[0]) - for name, value in response_headers: - name = _normalize_name(name) - self.response_headers[name] = value.strip() - self.start_response_called = True - - def parse_headers(self): - headers = {} - for name, value in self.line.get("headers", {}).items(): - name = name.strip().upper().encode("utf-8") - headers[name] = value.strip().encode("utf-8") - return headers - -class WSGIHandler(External): - - def __init__(self, application, stdin=sys.stdin, - stdout=sys.stdout): - External.__init__(self, stdin=stdin, stdout=stdout) - self.app = application - - def handle_line(self, line): - try: - req = WSGIRequest(line) - response = self.app(req.read(), req.start_response) - except: - self.send_response(500, "".join(traceback.format_exc()), - {"Content-Type": "text/plain"}) - return - - content = "".join(response).encode("utf-8") - self.send_response(req.response_status, content, req.response_headers) - - diff --git a/couchdbkit/wsgi/proxy.py b/couchdbkit/wsgi/proxy.py deleted file mode 100644 index 0849975..0000000 --- a/couchdbkit/wsgi/proxy.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 - -# -# This file is part of couchdbkit released under the MIT license. -# See the NOTICE for more information. - -import urlparse - -from restkit.contrib.wsgi_proxy import HostProxy, ALLOWED_METHODS -from webob import Request - -class CouchdbProxy(object): - """\ - WSGI application to proxy a couchdb server. 
- - Simple usage to proxy a CouchDB server on default url:: - - from couchdbkit.wsgi import CouchdbProxy - application = CouchdbProxy() - """ - - def __init__(self, uri="http://127.0.0.1:5984", - allowed_method=ALLOWED_METHODS, **kwargs): - self.proxy = HostProxy(uri, allowed_methods=allowed_method, - **kwargs) - - def do_proxy(self, req, environ, start_response): - """\ - return proxy response. Can be overrided to add authentification and - such. It's better to override do_proxy method than the __call__ - """ - return req.get_response(self.proxy) - - def __call__(self, environ, start_response): - req = Request(environ) - if 'RAW_URI' in req.environ: - # gunicorn so we can use real path non encoded - u = urlparse.urlparse(req.environ['RAW_URI']) - req.environ['PATH_INFO'] = u.path - - resp = self.do_proy(req, environ, start_response) - return resp(environ, start_response) diff --git a/distribute_setup.py b/distribute_setup.py index cfb3bbe..ef014ed 100644 --- a/distribute_setup.py +++ b/distribute_setup.py @@ -13,6 +13,7 @@ This file can also be run as a script to install or upgrade setuptools. 
""" +from __future__ import absolute_import import os import sys import time @@ -178,7 +179,7 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, try: from urllib.request import urlopen except ImportError: - from urllib2 import urlopen + from six.moves.urllib.request import urlopen tgz_name = "distribute-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) diff --git a/doc/couchdbkit.org/buildweb.py b/doc/couchdbkit.org/buildweb.py index ea538bf..e01d473 100755 --- a/doc/couchdbkit.org/buildweb.py +++ b/doc/couchdbkit.org/buildweb.py @@ -17,6 +17,8 @@ from __future__ import with_statement +from __future__ import absolute_import +from __future__ import print_function import codecs import datetime import os @@ -27,6 +29,7 @@ from jinja2 import Environment from jinja2.loaders import FileSystemLoader from jinja2.utils import open_if_exists +import six try: import markdown except ImportError: @@ -106,7 +109,7 @@ def process_directory(self, current_dir, files, target_path): files = [f for f in files if os.path.splitext(f)[1] in conf.EXTENSIONS] blog = None for f in files: - print "process %s" % f + print("process %s" % f) page = Page(self, f, current_dir, target_path) if page.is_blog() and f == "index.txt" or f == "archives.txt": continue @@ -119,14 +122,14 @@ def process_directory(self, current_dir, files, target_path): if not source_newer(page.finput, page.foutput) and f != "index.txt": continue - print "write %s" % page.foutput + print("write %s" % page.foutput) try: f = codecs.open(page.foutput, 'w', 'utf-8') try: f.write(page.render()) finally: f.close() - except (IOError, OSError), err: + except (IOError, OSError) as err: raise self.sitemap.append(page) if blog is not None: @@ -218,7 +221,7 @@ def render(self): archives_page = None if not os.path.isfile(index_page.finput): - raise IOError, "index.txt isn't found in %s" % self.current_dir + raise IOError("index.txt isn't found in %s" % 
self.current_dir) self.pages.sort(lambda a, b: a.headers['pubDate'] - b.headers['pubDate'], reverse=True) @@ -249,7 +252,7 @@ def render(self): f.write(page.render()) finally: f.close() - except (IOError, OSError), err: + except (IOError, OSError) as err: raise self.site.sitemap.append(page) @@ -294,20 +297,20 @@ def parse(self): (header_lines,body) = raw.split("\n\n", 1) for header in header_lines.split("\n"): (name, value) = header.split(": ", 1) - headers[name.lower()] = unicode(value.strip()) + headers[name.lower()] = six.text_type(value.strip()) self.headers = headers self.headers['pubDate'] = os.stat(self.finput)[ST_CTIME] self.headers['published'] = datetime.datetime.fromtimestamp(self.headers['pubDate']) self.body = body content_type = self.headers.get('content_type', conf.CONTENT_TYPE) - if content_type in self.content_types.keys(): + if content_type in list(self.content_types.keys()): self.foutput = os.path.join(self.target_path, "%s.%s" % (os.path.splitext(self.filename)[0], self.files_ext[content_type])) self.url = self.get_url() else: - raise TypeError, "Unknown content_type" + raise TypeError("Unknown content_type") except: - raise TypeError, "Invalid page file format for %s" % self.finput + raise TypeError("Invalid page file format for %s" % self.finput) self.parsed = True def is_blog(self): @@ -320,11 +323,11 @@ def render(self): self.parse() template = self.headers.get('template', conf.DEFAULT_TEMPLATE) content_type = self.headers.get('content_type', conf.CONTENT_TYPE) - if content_type in self.content_types.keys(): + if content_type in list(self.content_types.keys()): fun = getattr(self, "render_%s" % content_type) return fun(template) else: - raise TypeError, "Unknown content_type" + raise TypeError("Unknown content_type") def _render_html(self, template, body): kwargs = { @@ -341,13 +344,13 @@ def render_html(self, template): def render_markdown(self, template): if markdown is None: - raise TypeError, "markdown isn't suported" + raise 
TypeError("markdown isn't suported") body = convert_markdown(self.body) return self._render_html(template, body) def render_textile(self, template): if textile is None: - raise TypeError, "textile isn't suported" + raise TypeError("textile isn't suported") body = convert_textile(self.body) return self._render_html(template, body) diff --git a/doc/couchdbkit.org/conf.py b/doc/couchdbkit.org/conf.py index e788353..bcbc502 100644 --- a/doc/couchdbkit.org/conf.py +++ b/doc/couchdbkit.org/conf.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import import os, platform # options diff --git a/examples/django_blogapp/blog_app/models.py b/examples/django_blogapp/blog_app/models.py index ed5ccc8..6499c9b 100644 --- a/examples/django_blogapp/blog_app/models.py +++ b/examples/django_blogapp/blog_app/models.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import from datetime import datetime from couchdbkit.ext.django.schema import Document, StringProperty, \ diff --git a/examples/django_blogapp/blog_app/tests.py b/examples/django_blogapp/blog_app/tests.py index 501deb7..e585808 100644 --- a/examples/django_blogapp/blog_app/tests.py +++ b/examples/django_blogapp/blog_app/tests.py @@ -5,6 +5,7 @@ Replace this with more appropriate tests for your application. 
""" +from __future__ import absolute_import from django.test import TestCase diff --git a/examples/django_blogapp/blog_app/views.py b/examples/django_blogapp/blog_app/views.py index 8834e63..997e86b 100644 --- a/examples/django_blogapp/blog_app/views.py +++ b/examples/django_blogapp/blog_app/views.py @@ -1,10 +1,11 @@ +from __future__ import absolute_import from couchdbkit.ext.django.forms import DocumentForm from django.forms.fields import CharField from django.forms.widgets import HiddenInput from django.shortcuts import render_to_response from django.template import RequestContext -from models import Post, Comment +from .models import Post, Comment class PostForm(DocumentForm): diff --git a/examples/django_blogapp/settings.py b/examples/django_blogapp/settings.py index d1fa5ba..da59a5b 100644 --- a/examples/django_blogapp/settings.py +++ b/examples/django_blogapp/settings.py @@ -1,4 +1,5 @@ # Django settings for django_blogapp project. +from __future__ import absolute_import import os PROJECT_PATH = os.path.dirname(os.path.abspath(__file__)) diff --git a/examples/django_blogapp/urls.py b/examples/django_blogapp/urls.py index f8efdee..4bbb5b1 100644 --- a/examples/django_blogapp/urls.py +++ b/examples/django_blogapp/urls.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import from django.conf.urls import patterns, url diff --git a/examples/django_blogapp/wsgi.py b/examples/django_blogapp/wsgi.py index ab90313..8cfdede 100644 --- a/examples/django_blogapp/wsgi.py +++ b/examples/django_blogapp/wsgi.py @@ -13,6 +13,7 @@ framework. """ +from __future__ import absolute_import import os # We defer to a DJANGO_SETTINGS_MODULE already in the environment. 
This breaks diff --git a/examples/djangoapp/greeting/models.py b/examples/djangoapp/greeting/models.py index 9f41e09..c8205ee 100755 --- a/examples/djangoapp/greeting/models.py +++ b/examples/djangoapp/greeting/models.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import from datetime import datetime from django.db import models diff --git a/examples/djangoapp/greeting/views.py b/examples/djangoapp/greeting/views.py index a11b58c..ff21698 100755 --- a/examples/djangoapp/greeting/views.py +++ b/examples/djangoapp/greeting/views.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import from datetime import datetime from django.shortcuts import render_to_response as render from django.template import RequestContext, loader, Context diff --git a/examples/djangoapp/manage.py b/examples/djangoapp/manage.py index 5e78ea9..727a271 100755 --- a/examples/djangoapp/manage.py +++ b/examples/djangoapp/manage.py @@ -1,7 +1,8 @@ #!/usr/bin/env python +from __future__ import absolute_import from django.core.management import execute_manager try: - import settings # Assumed to be in the same directory. + from . import settings # Assumed to be in the same directory. except ImportError: import sys sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) diff --git a/examples/djangoapp/run.py b/examples/djangoapp/run.py index 182487c..8907764 100755 --- a/examples/djangoapp/run.py +++ b/examples/djangoapp/run.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import absolute_import from couchdbkit.wsgi.handler import WSGIHandler import os import sys diff --git a/examples/djangoapp/settings.py b/examples/djangoapp/settings.py index a6a6d77..03ef6be 100755 --- a/examples/djangoapp/settings.py +++ b/examples/djangoapp/settings.py @@ -1,5 +1,6 @@ # Django settings for testapp project. +from __future__ import absolute_import import os, platform PROJECT_PATH = os.path.dirname(os.path.abspath(__file__)) diff --git a/examples/djangoapp/urls.py b/examples/djangoapp/urls.py index 4a09ece..bb0cc79 100755 --- a/examples/djangoapp/urls.py +++ b/examples/djangoapp/urls.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import from django.conf.urls.defaults import * urlpatterns = patterns('', diff --git a/examples/pyramidapp/pyramid_couchdb_example/__init__.py b/examples/pyramidapp/pyramid_couchdb_example/__init__.py index d15a2c0..1ed361a 100644 --- a/examples/pyramidapp/pyramid_couchdb_example/__init__.py +++ b/examples/pyramidapp/pyramid_couchdb_example/__init__.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import from pyramid.config import Configurator from pyramid.events import subscriber, ApplicationCreated from couchdbkit import * diff --git a/examples/pyramidapp/pyramid_couchdb_example/tests.py b/examples/pyramidapp/pyramid_couchdb_example/tests.py index b51e540..2bfb564 100644 --- a/examples/pyramidapp/pyramid_couchdb_example/tests.py +++ b/examples/pyramidapp/pyramid_couchdb_example/tests.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import import unittest from pyramid import testing diff --git a/examples/pyramidapp/pyramid_couchdb_example/views.py b/examples/pyramidapp/pyramid_couchdb_example/views.py index 859f991..694c788 100644 --- a/examples/pyramidapp/pyramid_couchdb_example/views.py +++ b/examples/pyramidapp/pyramid_couchdb_example/views.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import import datetime from pyramid.view import view_config diff --git a/examples/pyramidapp/setup.py 
b/examples/pyramidapp/setup.py index 774d729..1707acd 100644 --- a/examples/pyramidapp/setup.py +++ b/examples/pyramidapp/setup.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import import os from setuptools import setup, find_packages diff --git a/examples/wsgi/test.py b/examples/wsgi/test.py index 5a79960..f894fc4 100755 --- a/examples/wsgi/test.py +++ b/examples/wsgi/test.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import import couchdbkit from couchdbkit.contrib import WSGIHandler import json diff --git a/requirements.txt b/requirements.txt index c9da5f5..29c9513 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,4 @@ restkit>=4.2.2 +jsonobject>=0.6.0 +cloudant==2.7.0 +six==1.11.0 diff --git a/requirements_dev.txt b/requirements_dev.txt index c38e143..3d8259b 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,3 +1,2 @@ -unittest2 nose restkit>=4.2.2 diff --git a/setup.cfg b/setup.cfg index 46c0ec2..ddb812b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,3 +2,6 @@ exclude=(django) with-coverage=1 cover-package=couchdbkit + +[bdist_wheel] +universal = 1 diff --git a/setup.py b/setup.py index f87519f..7470657 100755 --- a/setup.py +++ b/setup.py @@ -3,12 +3,13 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. 
+from __future__ import absolute_import from imp import load_source import os import sys -if not hasattr(sys, 'version_info') or sys.version_info < (2, 6, 0, 'final'): - raise SystemExit("couchdbkit requires Python 2.6 or later.") +if not hasattr(sys, 'version_info') or sys.version_info < (2, 7, 0, 'final'): + raise SystemExit("couchdbkit requires Python 2.7 or later.") from setuptools import setup, find_packages @@ -18,15 +19,15 @@ setup( - name = 'couchdbkit', + name = 'jsonobject-couchdbkit', version = version.__version__, description = 'Python couchdb kit', - long_description = file( + long_description = open( os.path.join( os.path.dirname(__file__), 'README.rst' - ) + ), 'rt' ).read(), author = 'Benoit Chesneau', author_email = 'benoitc@e-engura.com', @@ -40,8 +41,6 @@ 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.5', - 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Database', 'Topic :: Utilities', @@ -51,8 +50,14 @@ zip_safe = False, - install_requires = [ 'restkit>=4.2.2' ], - + install_requires = [ + 'restkit>=4.2.2', + 'jsonobject>=0.9.1', + 'cloudant==2.7.0', + 'six==1.11.0', + ], + provides=['couchdbkit'], + obsoletes=['couchdbkit'], entry_points=""" [couchdbkit.consumers] sync=couchdbkit.consumer.sync:SyncConsumer diff --git a/tests/client_test.py b/tests/client_test.py index 803e941..1447f08 100644 --- a/tests/client_test.py +++ b/tests/client_test.py @@ -3,13 +3,12 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. 
# +from __future__ import absolute_import +import six __author__ = 'benoitc@e-engura.com (Benoît Chesneau)' import copy -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from couchdbkit import ResourceNotFound, RequestFailed, \ ResourceConflict @@ -31,7 +30,7 @@ def tearDown(self): def testGetInfo(self): info = self.Server.info() - self.assert_(info.has_key('version')) + self.assert_('version' in info) def testCreateDb(self): res = self.Server.create_db('couchdbkit_test') @@ -79,7 +78,7 @@ def testServerContain(self): def testGetUUIDS(self): uuid = self.Server.next_uuid() - self.assert_(isinstance(uuid, basestring) == True) + self.assert_(isinstance(uuid, six.string_types) == True) self.assert_(len(self.Server._uuids) == 999) uuid2 = self.Server.next_uuid() self.assert_(uuid != uuid2) @@ -500,18 +499,18 @@ def testDeleteMultipleDocs(self): def testMultipleDocCOnflict(self): db = self.Server.create_db('couchdbkit_test') docs = [ - { 'string': 'test', 'number': 4 }, - { 'string': 'test', 'number': 5 }, - { 'string': 'test', 'number': 4 }, - { 'string': 'test', 'number': 6 } + {'string': 'test', 'number': 4}, + {'string': 'test', 'number': 5}, + {'string': 'test', 'number': 4}, + {'string': 'test', 'number': 6} ] db.bulk_save(docs) - self.assert_(len(db) == 4) + self.assertEqual(len(db), 4) docs1 = [ - docs[0], - docs[1], - {'_id': docs[2]['_id'], 'string': 'test', 'number': 4 }, - {'_id': docs[3]['_id'], 'string': 'test', 'number': 6 } + docs[0], + docs[1], + {'_id': docs[2]['_id'], 'string': 'test', 'number': 4}, + {'_id': docs[3]['_id'], 'string': 'test', 'number': 6} ] self.assertRaises(BulkSaveError, db.bulk_save, docs1) @@ -519,39 +518,21 @@ def testMultipleDocCOnflict(self): docs2 = [ docs1[0], docs1[1], - {'_id': docs[2]['_id'], 'string': 'test', 'number': 4 }, - {'_id': docs[3]['_id'], 'string': 'test', 'number': 6 } + {'_id': docs[2]['_id'], 'string': 'test', 'number': 4}, + {'_id': docs[3]['_id'], 'string': 'test', 
'number': 6} ] doc23 = docs2[3].copy() all_errors = [] try: db.bulk_save(docs2) - except BulkSaveError, e: + except BulkSaveError as e: all_errors = e.errors - self.assert_(len(all_errors) == 2) - self.assert_(all_errors[0]['error'] == 'conflict') - self.assert_(doc23 == docs2[3]) - - docs3 = [ - docs2[0], - docs2[1], - {'_id': docs[2]['_id'], 'string': 'test', 'number': 4 }, - {'_id': docs[3]['_id'], 'string': 'test', 'number': 6 } - ] - - doc33 = docs3[3].copy() - all_errors2 = [] - try: - db.bulk_save(docs3, all_or_nothing=True) - except BulkSaveError, e: - all_errors2 = e.errors - - self.assert_(len(all_errors2) == 0) - self.assert_(doc33 != docs3[3]) + self.assertEqual(len(all_errors), 2) + self.assertEqual(all_errors[0]['error'], 'conflict') + self.assertEqual(doc23, docs2[3]) del self.Server['couchdbkit_test'] - def testCopy(self): db = self.Server.create_db('couchdbkit_test') doc = { "f": "a" } @@ -584,8 +565,9 @@ def testCopy(self): def testSetSecurity(self): db = self.Server.create_db('couchdbkit_test') - res = db.set_security({"meta": "test"}) - self.assert_(res['ok'] == True) + sec_doc = {"meta": "test"} + res = db.set_security(sec_doc) + self.assertEquals(res, sec_doc) del self.Server['couchdbkit_test'] def testGetSecurity(self): @@ -692,26 +674,6 @@ def testCount(self): self.assert_(count == 2) del self.Server['couchdbkit_test'] - def testTemporaryView(self): - db = self.Server.create_db('couchdbkit_test') - # save 2 docs - doc1 = { '_id': 'test', 'string': 'test', 'number': 4, - 'docType': 'test' } - db.save_doc(doc1) - doc2 = { '_id': 'test2', 'string': 'test', 'number': 2, - 'docType': 'test'} - db.save_doc(doc2) - - design_doc = { - "map": """function(doc) { if (doc.docType == "test") { emit(doc._id, doc); -}}""" - } - - results = db.temp_view(design_doc) - self.assert_(len(results) == 2) - del self.Server['couchdbkit_test'] - - def testView2(self): db = self.Server.create_db('couchdbkit_test') # save 2 docs @@ -862,4 +824,3 @@ class B(Document): 
if __name__ == '__main__': unittest.main() - diff --git a/tests/test_changes.py b/tests/test_changes.py index 7f95adc..203458e 100644 --- a/tests/test_changes.py +++ b/tests/test_changes.py @@ -3,14 +3,13 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. # +from __future__ import absolute_import +from six.moves import range __author__ = 'benoitc@e-engura.com (Benoît Chesneau)' import threading import time -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from couchdbkit import * from couchdbkit.changes import ChangesStream, fold, foreach diff --git a/tests/test_consumer.py b/tests/test_consumer.py index 8397999..cb7a2b9 100644 --- a/tests/test_consumer.py +++ b/tests/test_consumer.py @@ -3,14 +3,13 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. # +from __future__ import absolute_import +from six.moves import range __author__ = 'benoitc@e-engura.com (Benoît Chesneau)' import threading import time -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from couchdbkit import * @@ -31,19 +30,18 @@ def _delete_db(self): except: pass - def test_fetch(self): res1 = self.consumer.fetch() - self.assert_("last_seq" in res1) - self.assert_(res1["last_seq"] == 0) - self.assert_(res1["results"] == []) + self.assertTrue("last_seq" in res1) + self.assertTrue(res1["last_seq"].startswith("0")) + self.assertEqual(res1["results"], []) doc = {} self.db.save_doc(doc) res2 = self.consumer.fetch() - self.assert_(res2["last_seq"] == 1) - self.assert_(len(res2["results"]) == 1) + self.assertTrue(res2["last_seq"].startswith("1")) + self.assertEqual(len(res2["results"]), 1) line = res2["results"][0] - self.assert_(line["id"] == doc["_id"]) + self.assertEqual(line["id"], doc["_id"]) def test_longpoll(self): diff --git a/tests/test_loaders.py b/tests/test_loaders.py index 54edd68..416e490 100644 
--- a/tests/test_loaders.py +++ b/tests/test_loaders.py @@ -3,16 +3,14 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. # +from __future__ import absolute_import __author__ = 'benoitc@e-engura.com (Benoît Chesneau)' import base64 import os import shutil import tempfile -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from restkit import ResourceNotFound, RequestFailed diff --git a/tests/test_resource.py b/tests/test_resource.py index e2b20d7..9ae12f5 100644 --- a/tests/test_resource.py +++ b/tests/test_resource.py @@ -3,12 +3,10 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. # +from __future__ import absolute_import __author__ = 'benoitc@e-engura.com (Benoît Chesneau)' -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from restkit.errors import RequestFailed, RequestError from couchdbkit.resource import CouchdbResource @@ -31,7 +29,7 @@ def tearDown(self): def testGetInfo(self): info = self.couchdb.get().json_body - self.assert_(info.has_key('version')) + self.assert_('version' in info) def testCreateDb(self): res = self.couchdb.put('/couchdkbit_test').json_body diff --git a/tests/test_schema.py b/tests/test_schema.py index fbacbd2..57bcdf0 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -3,14 +3,14 @@ # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. 
+from __future__ import absolute_import +from __future__ import print_function +import six __author__ = 'benoitc@e-engura.com (Benoît Chesneau)' import datetime import decimal -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from couchdbkit import * from couchdbkit.schema.properties import support_setproperty @@ -44,8 +44,8 @@ class Test(Document): doc.foo="test" try: doc.bar="bla" - except AttributeError, e: - self.assert_(str(e) == "bar is not defined in schema (not a valid property)") + except AttributeError as e: + self.assertEqual(str(e), "'bar' is not defined in schema (not a valid property)") doc.save() self.assert_(not hasattr(doc, "bar")) assert doc._doc['foo'] == "test" @@ -59,8 +59,8 @@ class Test(StaticDocument): doc.foo="test" try: doc.bar="bla" - except AttributeError, e: - self.assert_(str(e) == "bar is not defined in schema (not a valid property)") + except AttributeError as e: + self.assertEqual(str(e), "'bar' is not defined in schema (not a valid property)") doc.save() self.assert_(not hasattr(doc, "bar")) self.assert_(doc._doc['foo'] == "test") @@ -134,9 +134,9 @@ class Test(Document): string2 = StringProperty() doc = Test() - self.assert_(len(doc) == 3) + self.assert_(len(doc) == 6) doc.string3 = "4" - self.assert_(len(doc) == 4) + self.assert_(len(doc) == 7) def testStore(self): db = self.server.create_db('couchdbkit_test') @@ -181,14 +181,14 @@ class Test2(Document): try: Test.bulk_save( [doc1, doc2, doc3] ) - except TypeError, e: + except TypeError as e: self.assert_(str(e)== "doc database required to save document" ) Test.set_db( db ) bad_doc = Test2(string="bad_doc") try: Test.bulk_save( [doc1, doc2, doc3, bad_doc] ) - except ValueError, e: + except ValueError as e: self.assert_(str(e) == "one of your documents does not have the correct type" ) Test.bulk_save( [doc1, doc2, doc3] ) @@ -214,8 +214,6 @@ class Test2(Document): self.server.delete_db('couchdbkit_test') - - def testGet(self): db = 
self.server.create_db('couchdbkit_test') class Test(Document): @@ -257,7 +255,7 @@ class Test(Document): doc1 = Test.get(doc._id) self.server.delete_db('couchdbkit_test') - self.assert_(isinstance(doc1.field, basestring)) + self.assert_(isinstance(doc1.field, six.string_types)) self.assert_(isinstance(doc1.field1, datetime.datetime)) self.assert_(isinstance(doc1.field2, datetime.date)) self.assert_(isinstance(doc1.field3, datetime.time)) @@ -323,23 +321,6 @@ class TestDoc2(TestDoc): self.assert_(len(doc2._dynamic_properties) == 1) - def testClone(self): - class A(DocumentSchema): - s = StringProperty() - - class B(Document): - a = SchemaProperty(A) - s1 = StringProperty() - - b = B() - b.s1 = "test1" - b.a.s = "test" - b1 = b.clone() - - self.assert_(b1.s1 == "test1") - self.assert_('s' in b1._doc['a']) - self.assert_(b1.a.s == "test") - def testView(self): class TestDoc(Document): field1 = StringProperty() @@ -445,7 +426,7 @@ class TestDoc(Document): results3 = TestDoc.view('test/all', include_docs=True, wrapper=lambda row: row['doc']['field1']) self.assert_(len(results3) == 2) - self.assert_(isinstance(results3.first(), unicode) == True) + self.assert_(isinstance(results3.first(), six.text_type) == True) self.server.delete_db('couchdbkit_test') def test_wrong_doc_type(self): @@ -550,31 +531,6 @@ class TestDoc(Document): self.assert_(len(results) == 2) self.server.delete_db('couchdbkit_test') - - def testTempView(self): - class TestDoc(Document): - field1 = StringProperty() - field2 = StringProperty() - - design_doc = { - "map": """function(doc) { if (doc.doc_type == "TestDoc") { emit(doc._id, doc); -}}""" - } - - doc = TestDoc(field1="a", field2="b") - doc1 = TestDoc(field1="c", field2="d") - - db = self.server.create_db('couchdbkit_test') - TestDoc._db = db - - doc.save() - doc1.save() - results = TestDoc.temp_view(design_doc) - self.assert_(len(results) == 2) - doc3 = list(results)[0] - self.assert_(hasattr(doc3, "field1")) - 
self.server.delete_db('couchdbkit_test') - def testDocumentAttachments(self): db = self.server.create_db('couchdbkit_test') @@ -649,12 +605,34 @@ class Test(Document): db.bulk_delete([doc1, doc2, doc3]) - print list(db.all_docs(include_docs=True)) + print(list(db.all_docs(include_docs=True))) self.assert_(len(db) == 0) self.assert_(db.info()['doc_del_count'] == 3) self.server.delete_db('couchdbkit_test') + def testDocumentBulkDelete(self): + db = self.server.create_db('couchdbkit_test') + + class Test(Document): + string = StringProperty() + Test._db = db + + doc = Test() + doc.string = "test" + doc.save() + doc2 = Test() + doc2.string = "test2" + doc2.save() + + Test.bulk_delete([doc, doc2]) + + for doc_id in [doc._id, doc2._id]: + with self.assertRaises(ResourceNotFound): + Test.get(doc_id) + + self.server.delete_db('couchdbkit_test') + class PropertyTestCase(unittest.TestCase): @@ -729,7 +707,7 @@ def ftest(): test_dates = [ ([2008, 11, 10, 8, 0, 0], "2008-11-10T08:00:00Z"), ([9999, 12, 31, 23, 59, 59], '9999-12-31T23:59:59Z'), - ([0001, 1, 1, 0, 0, 1], '0001-01-01T00:00:01Z'), + ([1, 1, 1, 0, 0, 1], '0001-01-01T00:00:01Z'), ] for date, date_str in test_dates: @@ -738,8 +716,6 @@ def ftest(): value = test.field self.assert_(isinstance(value, datetime.datetime)) - - def testDateProperty(self): class Test(Document): field = DateProperty() @@ -754,7 +730,6 @@ def ftest(): value = test.field self.assert_(isinstance(value, datetime.date)) - def testTimeProperty(self): class Test(Document): field = TimeProperty() @@ -780,7 +755,7 @@ class Test(Document): self.assert_(test._doc['field'] == "test") self.assert_(test._doc['field1'] == "2008-11-10T08:00:00Z") - self.assert_(isinstance(test.field, basestring)) + self.assert_(isinstance(test.field, six.string_types)) self.assert_(isinstance(test.field1, datetime.datetime)) Test._db = self.db test.save() @@ -788,7 +763,7 @@ class Test(Document): v = doc2.field v1 = doc2.field1 - self.assert_(isinstance(v, basestring)) + 
self.assert_(isinstance(v, six.string_types)) self.assert_(isinstance(v1, datetime.datetime)) def testMixDynamicProperties(self): @@ -810,8 +785,7 @@ class Test(Document): vd = doc2.dynamic_field self.assert_(isinstance(v1, datetime.datetime)) - self.assert_(isinstance(vd, basestring)) - + self.assert_(isinstance(vd, six.string_types)) def testSchemaProperty1(self): class MySchema(DocumentSchema): @@ -836,7 +810,6 @@ class MyDoc(Document): self.assert_(doc2.schema.astring == u"test") self.assert_(doc2._doc['schema']['astring'] == u"test") - def testSchemaPropertyWithRequired(self): class B( Document ): class b_schema(DocumentSchema): @@ -866,11 +839,11 @@ class DocOne(Document): class DocTwo(Document): name = StringProperty() - one = SchemaProperty(DocOne()) + one = SchemaProperty(DocOne) class DocThree(Document): name = StringProperty() - two = SchemaProperty(DocTwo()) + two = SchemaProperty(DocTwo) one = DocOne(name='one') two = DocTwo(name='two', one=one) @@ -929,7 +902,7 @@ class DocTwo(Document): def testSchemaWithPythonTypes(self): class A(Document): - c = unicode() + c = six.text_type() i = int(4) a = A() self.assert_(a._doc == {'c': u'', 'doc_type': 'A', 'i': 4}) @@ -950,42 +923,18 @@ class A(Document): self.assert_(b.s is None) self.assert_(b._doc['s'] is None) - def testSchemaBuild(self): - schema = DocumentSchema(i = IntegerProperty()) - C = DocumentSchema.build(**schema._dynamic_properties) - self.assert_('i' in C._properties) - self.assert_(isinstance(C.i, IntegerProperty)) - - c = C() - self.assert_(c._doc_type == 'AnonymousSchema') - self.assert_(c._doc == {'doc_type': 'AnonymousSchema', 'i': - None}) - - - schema2 = DocumentSchema(i = IntegerProperty(default=-1)) - C3 = DocumentSchema.build(**schema2._dynamic_properties) - c3 = C3() - - self.assert_(c3._doc == {'doc_type': 'AnonymousSchema', 'i': - -1}) - self.assert_(c3.i == -1) - - def bad_value(): - c3.i = "test" - - self.assertRaises(BadValueError, bad_value) - self.assert_(c3.i == -1) - def 
testSchemaPropertyValidation2(self): + class Bar(DocumentSchema): + foo = IntegerProperty() + class Foo( Document ): - bar = SchemaProperty(DocumentSchema(foo=IntegerProperty())) + bar = SchemaProperty(Bar) doc = Foo() def bad_value(): doc.bar.foo = "bla" self.assertRaises(BadValueError, bad_value) - def testDynamicSchemaProperty(self): from datetime import datetime class A(DocumentSchema): @@ -996,7 +945,7 @@ class A(DocumentSchema): class B(Document): s1 = StringProperty() s2 = StringProperty() - sm = SchemaProperty(a) + sm = SchemaProperty(A, default=lambda: a) b = B() self.assert_(b._doc == {'doc_type': 'B', 's1': None, 's2': None, @@ -1089,7 +1038,6 @@ class B(Document): self.assert_(len(b1.slm) == 2) self.assert_(b1.slm[0].s == "test") - def testSchemaListPropertySlice(self): """SchemaListProperty slice methods """ @@ -1111,8 +1059,8 @@ class B(Document): self.assertEqual([b.slm[0].s, b.slm[1].s], [a1.s, a2.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}, - {'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}, + {'doc_type': 'A', 's': six.text_type(a2.s)}] }) b.slm.append(a3) c = b.slm[1:3] @@ -1123,10 +1071,9 @@ class B(Document): self.assertEqual(b.slm[0].s, a1.s) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}] }) - def testSchemaListPropertyContains(self): """SchemaListProperty contains method """ @@ -1145,7 +1092,6 @@ class B(Document): self.assertTrue(a1 in b.slm) self.assertFalse(a2 in b.slm) - def testSchemaListPropertyCount(self): """SchemaListProperty count method """ @@ -1163,7 +1109,6 @@ class B(Document): b.slm = [a1, a2, a1] self.assertEqual(b.slm.count(a1), 2) - def testSchemaListPropertyExtend(self): """SchemaListProperty extend method """ @@ -1183,11 +1128,10 @@ class B(Document): self.assertEqual([b.slm[0].s, b.slm[1].s], [a1.s, a2.s]) 
self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}, - {'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}, + {'doc_type': 'A', 's': six.text_type(a2.s)}] }) - def testSchemaListPropertyIndex(self): """SchemaListProperty index method """ @@ -1211,8 +1155,10 @@ class B(Document): self.assertEqual(b.slm.index(a1, 1, -2), 2) with self.assertRaises(ValueError) as cm: b.slm.index(a3) - self.assertEqual(str(cm.exception), 'list.index(x): x not in list') - + self.assertIn(str(cm.exception), ( + '{0!r} is not in list'.format(a3), + 'list.index(x): x not in list' + )) def testSchemaListPropertyInsert(self): """SchemaListProperty insert method @@ -1237,12 +1183,11 @@ class B(Document): [b.slm[0].s, b.slm[1].s, b.slm[2].s], [a1.s, a2.s, a3.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}, - {'doc_type': 'A', 's': unicode(a2.s)}, - {'doc_type': 'A', 's': unicode(a3.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}, + {'doc_type': 'A', 's': six.text_type(a2.s)}, + {'doc_type': 'A', 's': six.text_type(a3.s)}] }) - def testSchemaListPropertyPop(self): """SchemaListProperty pop method """ @@ -1266,8 +1211,8 @@ class B(Document): self.assertEqual([b.slm[0].s, b.slm[1].s], [a1.s, a2.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}, - {'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}, + {'doc_type': 'A', 's': six.text_type(a2.s)}] }) v = b.slm.pop(0) self.assertEqual(v.s, a1.s) @@ -1275,10 +1220,9 @@ class B(Document): self.assertEqual(b.slm[0].s, a2.s) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a2.s)}] }) - def testSchemaListPropertyRemove(self): """SchemaListProperty remove method """ @@ -1299,12 +1243,14 @@ class B(Document): self.assertEqual(b.slm[0].s, a2.s) 
self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a2.s)}] }) with self.assertRaises(ValueError) as cm: b.slm.remove(a1) - self.assertEqual(str(cm.exception), 'list.remove(x): x not in list') - + self.assertIn(str(cm.exception), ( + '{0!r} is not in list'.format(a1), + 'list.remove(x): x not in list' + )) def testSchemaListPropertyReverse(self): """SchemaListProperty reverse method """ @@ -1325,11 +1271,10 @@ class B(Document): self.assertEqual([b.slm[0].s, b.slm[1].s], [a2.s, a1.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a2.s)}, - {'doc_type': 'A', 's': unicode(a1.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a2.s)}, + {'doc_type': 'A', 's': six.text_type(a1.s)}] }) - def testSchemaListPropertySort(self): """SchemaListProperty sort method """ @@ -1349,25 +1294,24 @@ class B(Document): self.assertEqual([b.slm[0].s, b.slm[1].s], [a1.s, a2.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}, - {'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}, + {'doc_type': 'A', 's': six.text_type(a2.s)}] }) b.slm.sort(key=lambda item: item['s'], reverse=True) self.assertEqual([b.slm[0].s, b.slm[1].s], [a2.s, a1.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a2.s)}, - {'doc_type': 'A', 's': unicode(a1.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a2.s)}, + {'doc_type': 'A', 's': six.text_type(a1.s)}] }) b.slm.sort(cmp=lambda x, y: cmp(x['s'].lower(), y['s'].lower())) self.assertEqual([b.slm[0].s, b.slm[1].s], [a1.s, a2.s]) self.assertEqual(b._doc, { 'doc_type': 'B', - 'slm': [{'doc_type': 'A', 's': unicode(a1.s)}, - {'doc_type': 'A', 's': unicode(a2.s)}] + 'slm': [{'doc_type': 'A', 's': six.text_type(a1.s)}, + {'doc_type': 'A', 's': six.text_type(a2.s)}] }) - def testSchemaDictProperty(self): class A(DocumentSchema): 
i = IntegerProperty() @@ -1396,7 +1340,6 @@ class B(Document): self.assert_(b1.d['v1'].i == 123) self.assert_(b1.d[23].i == 42) - def testListProperty(self): from datetime import datetime class A(Document): @@ -1416,27 +1359,12 @@ class A(Document): self.assert_(len(a.l) == 1) self.assert_(a.l[0] == datetime(2009, 4, 13, 22, 56, 10)) self.assert_(a._doc == {'doc_type': 'A', 'l': ['2009-04-13T22:56:10Z']}) - a.l.append({ 's': "test"}) - self.assert_(a.l == [datetime(2009, 4, 13, 22, 56, 10), {'s': 'test'}]) - self.assert_(a._doc == {'doc_type': 'A', 'l': ['2009-04-13T22:56:10Z', {'s': 'test'}]} - ) - a.save() b = A.get(a._id) - self.assert_(len(b.l) == 2) + self.assert_(len(b.l) == 1) self.assert_(b.l[0] == datetime(2009, 4, 13, 22, 56, 10)) - self.assert_(b._doc['l'] == ['2009-04-13T22:56:10Z', {'s': 'test'}]) - - - a = A(l=["a", "b", "c"]) - a.save() - b = self.db.get(a._id, wrapper=A.wrap) - self.assert_(a.l == ["a", "b", "c"]) - b.l = [] - self.assert_(b.l == []) - self.assert_(b.to_json()['l'] == []) - + self.assert_(b._doc['l'] == ['2009-04-13T22:56:10Z']) def testListPropertyNotEmpty(self): from datetime import datetime @@ -1448,7 +1376,7 @@ class A(Document): self.assertRaises(BadValueError, a.save) try: a.validate() - except BadValueError, e: + except BadValueError as e: pass self.assert_(str(e) == 'Property l is required.') @@ -1461,27 +1389,25 @@ class A(Document): class A2(Document): l = ListProperty() - a2 = A2() - self.assertTrue(a2.validate(required=False)) - self.assertTrue(a2.validate()) - + self.assertIsNone(a2.validate(required=False)) + self.assertIsNone(a2.validate()) def testListPropertyWithType(self): from datetime import datetime class A(Document): l = ListProperty(item_type=datetime) a = A() - a.l.append("test") - self.assertRaises(BadValueError, a.validate) + with self.assertRaises(BadValueError): + a.l.append("test") class B(Document): ls = StringListProperty() b = B() b.ls.append(u"test") - self.assertTrue(b.validate()) - 
b.ls.append(datetime.utcnow()) - self.assertRaises(BadValueError, b.validate) + self.assertIsNone(b.validate()) + with self.assertRaises(BadValueError): + b.ls.append(datetime.utcnow()) b1 = B() b1.ls = [u'hello', u'123'] @@ -1492,7 +1418,6 @@ class B(Document): b1.ls.remove(u'hello') self.assert_(u'hello' not in b1.ls) - def testListPropertyExtend(self): """list extend method for property w/o type """ @@ -1504,7 +1429,6 @@ class A(Document): self.assert_(a.l == [42, 24]) self.assert_(a._doc == {'doc_type': 'A', 'l': [42, 24]}) - def testListPropertyExtendWithType(self): """list extend method for property w/ type """ @@ -1522,7 +1446,6 @@ class A(Document): 'l': ['2011-03-11T21:31:01Z', '2011-11-03T13:12:02Z'] }) - def testListPropertyInsert(self): """list insert method for property w/o type """ @@ -1535,7 +1458,6 @@ class A(Document): self.assertEqual(a.l, [42, 4224, 24]) self.assertEqual(a._doc, {'doc_type': 'A', 'l': [42, 4224, 24]}) - def testListPropertyInsertWithType(self): """list insert method for property w/ type """ @@ -1557,7 +1479,6 @@ class A(Document): '2010-01-12T03:02:03Z'] }) - def testListPropertyPop(self): """list pop method for property w/o type """ @@ -1575,7 +1496,6 @@ class A(Document): self.assert_(a.l == [24]) self.assert_(a._doc == {'doc_type': 'A', 'l': [24]}) - def testListPropertyPopWithType(self): """list pop method for property w/ type """ @@ -1592,7 +1512,6 @@ class A(Document): self.assertEqual(v, d3) self.assertEqual(a.l, [d1, d2]) - def testDictProperty(self): from datetime import datetime class A(Document): @@ -1668,7 +1587,7 @@ class A(Document): self.assertRaises(BadValueError, a.save) try: a.save() - except BadValueError, e: + except BadValueError as e: pass self.assert_(str(e) == 'Property d is required.') @@ -1681,8 +1600,8 @@ class A(Document): class A2(Document): d = DictProperty() a2 = A2() - self.assertTrue(a2.validate(required=False)) - self.assertTrue(a2.validate()) + self.assertIsNone(a2.validate(required=False)) + 
self.assertIsNone(a2.validate()) def testDynamicDictProperty(self): from datetime import datetime @@ -1693,7 +1612,7 @@ class A(Document): a.d = {} a.d['test'] = { 'a': datetime(2009, 5, 10, 21, 19, 21, 127380) } - self.assert_(a.d == {'test': {'a': datetime(2009, 5, 10, 21, 19, 21, 127380)}}) + self.assert_(a.d == {'test': {'a': datetime(2009, 5, 10, 21, 19, 21)}}) self.assert_(a._doc == {'d': {'test': {'a': '2009-05-10T21:19:21Z'}}, 'doc_type': 'A'} ) a.d['test']['b'] = "essai" @@ -1701,7 +1620,7 @@ class A(Document): a.d['essai'] = "test" self.assert_(a.d == {'essai': 'test', - 'test': {'a': datetime(2009, 5, 10, 21, 19, 21, 127380), + 'test': {'a': datetime(2009, 5, 10, 21, 19, 21), 'b': 'essai'}} ) self.assert_(a._doc == {'d': {'essai': 'test', 'test': {'a': '2009-05-10T21:19:21Z', 'b': 'essai'}}, @@ -1714,7 +1633,7 @@ class A(Document): a.d['test']['essai'] = { "a": datetime(2009, 5, 10, 21, 21, 11, 425782) } self.assert_(a.d == {'essai': 'test', 'test': {'b': 'essai', - 'essai': {'a': datetime(2009, 5, 10, 21, 21, 11, 425782)}}} + 'essai': {'a': datetime(2009, 5, 10, 21, 21, 11)}}} ) self.assert_(a._doc == {'d': {'essai': 'test', 'test': {'b': 'essai', 'essai': {'a': '2009-05-10T21:21:11Z'}}}, @@ -1785,7 +1704,7 @@ class A(Document): a.l.append(1) a.l.append(datetime(2009, 5, 12, 13, 35, 9, 425701)) a.l.append({ 's': "test"}) - self.assert_(a.l == [1, datetime(2009, 5, 12, 13, 35, 9, 425701), {'s': 'test'}]) + self.assert_(a.l == [1, datetime(2009, 5, 12, 13, 35, 9), {'s': 'test'}]) self.assert_(a._doc == {'doc_type': 'A', 'l': [1, '2009-05-12T13:35:09Z', {'s': 'test'}]} ) a.l[2]['date'] = datetime(2009, 5, 12, 13, 35, 9, 425701) @@ -1804,8 +1723,8 @@ class A(Document): a.l[2]['s'] = 'test edited' self.assert_(a.l == [1, - datetime(2009, 5, 12, 13, 35, 9, 425701), - {'date': datetime(2009, 5, 12, 13, 35, 9, 425701), + datetime(2009, 5, 12, 13, 35, 9), + {'date': datetime(2009, 5, 12, 13, 35, 9), 's': 'test edited'}] ) self.assert_(a._doc['l'] == [1, @@ 
-1861,7 +1780,6 @@ class A2(Document): self.assert_(b.to_json()['d'] == {}) - if support_setproperty: class SetPropertyTestCase(unittest.TestCase): def testSetPropertyConstructor(self): @@ -1881,7 +1799,7 @@ class B(Document): class C(Document): s = SetProperty(item_type=tuple) self.assertIn( - "item_type not in set([", str(cm.exception)) + "item_type not in ", str(cm.exception)) def testSetPropertyAssignment(self): @@ -2203,17 +2121,5 @@ class A(Document): self.assertEqual(a._doc['s'], []) - -class SchemaProxyUtilityTestCase(unittest.TestCase): - def test_svalue_to_json_instance(self): - from couchdbkit.schema.properties_proxy import svalue_to_json - - svalue_to_json({}, Document(), True) - - def test_svalue_to_json_schema(self): - from couchdbkit.schema.properties_proxy import svalue_to_json - - svalue_to_json({}, Document, False) - if __name__ == '__main__': unittest.main()