diff --git a/TouchDB-Android-Ektorp/.classpath b/TouchDB-Android-Ektorp/.classpath
index 1b0421b..ce767aa 100644
--- a/TouchDB-Android-Ektorp/.classpath
+++ b/TouchDB-Android-Ektorp/.classpath
@@ -1,12 +1,11 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
diff --git a/TouchDB-Android-Ektorp/libs/org.ektorp-1.2.2.jar b/TouchDB-Android-Ektorp/libs/org.ektorp-1.2.2.jar
deleted file mode 100644
index a3909ee..0000000
Binary files a/TouchDB-Android-Ektorp/libs/org.ektorp-1.2.2.jar and /dev/null differ
diff --git a/TouchDB-Android-Ektorp/libs/org.ektorp.android-1.2.2.jar b/TouchDB-Android-Ektorp/libs/org.ektorp.android-1.2.2.jar
deleted file mode 100644
index 1ae7e48..0000000
Binary files a/TouchDB-Android-Ektorp/libs/org.ektorp.android-1.2.2.jar and /dev/null differ
diff --git a/TouchDB-Android-Ektorp/lint.xml b/TouchDB-Android-Ektorp/lint.xml
new file mode 100644
index 0000000..ee0eead
--- /dev/null
+++ b/TouchDB-Android-Ektorp/lint.xml
@@ -0,0 +1,3 @@
+
+
+
\ No newline at end of file
diff --git a/TouchDB-Android-Ektorp/project.properties b/TouchDB-Android-Ektorp/project.properties
index 63f16dd..eda01e1 100644
--- a/TouchDB-Android-Ektorp/project.properties
+++ b/TouchDB-Android-Ektorp/project.properties
@@ -8,6 +8,6 @@
 # project structure.
 
 # Project target.
-target=android-17
+target=Google Inc.:Google APIs:15
 android.library=true
 android.library.reference.1=../TouchDB-Android
diff --git a/TouchDB-Android-Listener/.classpath b/TouchDB-Android-Listener/.classpath
index cecf57d..dd64e62 100644
--- a/TouchDB-Android-Listener/.classpath
+++ b/TouchDB-Android-Listener/.classpath
@@ -6,5 +6,6 @@
+
diff --git a/TouchDB-Android/.classpath b/TouchDB-Android/.classpath
index 462ef5c..374d658 100644
--- a/TouchDB-Android/.classpath
+++ b/TouchDB-Android/.classpath
@@ -1,10 +1,13 @@
-
-
-
-
+
+
+
+
+
+
+
diff --git a/TouchDB-Android/.settings/org.hibernate.eclipse.console.prefs b/TouchDB-Android/.settings/org.hibernate.eclipse.console.prefs
new file mode 100644
index 0000000..1f6dcec
--- /dev/null
+++ b/TouchDB-Android/.settings/org.hibernate.eclipse.console.prefs
@@ -0,0 +1,4 @@
+#Sat Mar 30 13:04:34 GMT+05:30 2013
+default.configuration=
+eclipse.preferences.version=1
+hibernate3.enabled=false
diff --git a/TouchDB-Android/project.properties b/TouchDB-Android/project.properties
index e61077f..4c10f5c 100644
--- a/TouchDB-Android/project.properties
+++ b/TouchDB-Android/project.properties
@@ -8,5 +8,5 @@
 # project structure.
 
 # Project target.
-target=android-17
+target=Google Inc.:Google APIs:15
 android.library=true
diff --git a/TouchDB-Android/src/com/couchbase/touchdb/TDDatabase.java b/TouchDB-Android/src/com/couchbase/touchdb/TDDatabase.java
index 7fe8555..aefa66b 100644
--- a/TouchDB-Android/src/com/couchbase/touchdb/TDDatabase.java
+++ b/TouchDB-Android/src/com/couchbase/touchdb/TDDatabase.java
@@ -23,6 +23,7 @@
 import java.io.InputStream;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -35,6 +36,7 @@
 import android.content.ContentValues;
 import android.database.Cursor;
 import android.database.SQLException;
+import android.database.sqlite.SQLiteConstraintException;
 import android.database.sqlite.SQLiteDatabase;
 import android.database.sqlite.SQLiteException;
 import android.util.Log;
@@ -52,2462 +54,2793 @@
  */
 public class TDDatabase extends Observable {
-    private String path;
-    private String name;
-    private SQLiteDatabase database;
-    private boolean open = false;
-    private int transactionLevel = 0;
-    public static final String TAG = "TDDatabase";
-
-    private Map views;
-    private Map filters;
-    private Map validations;
-    private List activeReplicators;
-    private TDBlobStore attachments;
-
-    /**
-     * Options for what metadata to include in document bodies
-     */
-    public enum TDContentOptions {
-        TDIncludeAttachments, TDIncludeConflicts, TDIncludeRevs, TDIncludeRevsInfo, TDIncludeLocalSeq, TDNoBody
-    }
-
-    private static final Set KNOWN_SPECIAL_KEYS;
-
-    static {
-        KNOWN_SPECIAL_KEYS = new HashSet();
-        KNOWN_SPECIAL_KEYS.add("_id");
-        KNOWN_SPECIAL_KEYS.add("_rev");
-        KNOWN_SPECIAL_KEYS.add("_attachments");
-        KNOWN_SPECIAL_KEYS.add("_deleted");
-        KNOWN_SPECIAL_KEYS.add("_revisions");
-        KNOWN_SPECIAL_KEYS.add("_revs_info");
-        KNOWN_SPECIAL_KEYS.add("_conflicts");
-        KNOWN_SPECIAL_KEYS.add("_deleted_conflicts");
-    }
-
-    public static final String SCHEMA = "" +
-        "CREATE TABLE docs ( " +
-        " doc_id INTEGER PRIMARY KEY, " +
-        " docid TEXT UNIQUE NOT NULL); " +
-        " CREATE INDEX docs_docid ON docs(docid); " +
-        " CREATE TABLE revs ( " +
-        " sequence INTEGER PRIMARY KEY AUTOINCREMENT, " +
-        " doc_id INTEGER NOT NULL REFERENCES docs(doc_id) ON DELETE CASCADE, " +
-        " revid TEXT NOT NULL, " +
-        " parent INTEGER REFERENCES revs(sequence) ON DELETE SET NULL, " +
-        " current BOOLEAN, " +
-        " deleted BOOLEAN DEFAULT 0, " +
-        " json BLOB); " +
-        " CREATE INDEX revs_by_id ON revs(revid, doc_id); " +
-        " CREATE INDEX revs_current ON revs(doc_id, current); " +
-        " CREATE INDEX revs_parent ON revs(parent); " +
-        " CREATE TABLE localdocs ( " +
-        " docid TEXT UNIQUE NOT NULL, " +
-        " revid TEXT NOT NULL, " +
-        " json BLOB); " +
-        " CREATE INDEX localdocs_by_docid ON localdocs(docid); " +
-        " CREATE TABLE views ( " +
-        " view_id INTEGER PRIMARY KEY, " +
-        " name TEXT UNIQUE NOT NULL," +
-        " version TEXT, " +
-        " lastsequence INTEGER DEFAULT 0); " +
-        " CREATE INDEX views_by_name ON views(name); " +
-        " CREATE TABLE maps ( " +
-        " view_id INTEGER NOT NULL REFERENCES views(view_id) ON DELETE CASCADE, " +
-        " sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE, " +
-        " key TEXT NOT NULL COLLATE JSON, " +
-        " value TEXT); " +
-        " CREATE INDEX maps_keys on maps(view_id, key COLLATE JSON); " +
-        " CREATE TABLE attachments ( " +
-        " sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE, " +
-        " filename TEXT NOT NULL, " +
-        " key BLOB NOT NULL, " +
-        " type TEXT, " +
-        " length INTEGER NOT NULL, " +
-        " revpos INTEGER 
DEFAULT 0); " + - " CREATE INDEX attachments_by_sequence on attachments(sequence, filename); " + - " CREATE TABLE replicators ( " + - " remote TEXT NOT NULL, " + - " push BOOLEAN, " + - " last_sequence TEXT, " + - " UNIQUE (remote, push)); " + - " PRAGMA user_version = 3"; // at the end, update user_version - - /*************************************************************************************************/ - /*** TDDatabase ***/ - /*************************************************************************************************/ - - public String getAttachmentStorePath() { - String attachmentStorePath = path; - int lastDotPosition = attachmentStorePath.lastIndexOf('.'); - if( lastDotPosition > 0 ) { - attachmentStorePath = attachmentStorePath.substring(0, lastDotPosition); - } - attachmentStorePath = attachmentStorePath + File.separator + "attachments"; - return attachmentStorePath; - } - - public static TDDatabase createEmptyDBAtPath(String path) { - if(!FileDirUtils.removeItemIfExists(path)) { - return null; - } - TDDatabase result = new TDDatabase(path); - File af = new File(result.getAttachmentStorePath()); - //recursively delete attachments path - if(!FileDirUtils.deleteRecursive(af)) { - return null; - } - if(!result.open()) { - return null; - } - return result; - } - - public TDDatabase(String path) { - assert(path.startsWith("/")); //path must be absolute - this.path = path; - this.name = FileDirUtils.getDatabaseNameFromPath(path); - } - - public String toString() { - return this.getClass().getName() + "[" + path + "]"; - } - - public boolean exists() { - return new File(path).exists(); - } - - /** - * Replaces the database with a copy of another database. - * - * This is primarily used to install a canned database on first launch of an app, in which case you should first check .exists to avoid replacing the database if it exists already. The canned database would have been copied into your app bundle at build time. - * - * @param databasePath Path of the database file that should replace this one. - * @param attachmentsPath Path of the associated attachments directory, or nil if there are no attachments. 
- * @return true if the database was copied, IOException if an error occurs - **/ - public boolean replaceWithDatabase(String databasePath, String attachmentsPath) throws IOException { - String dstAttachmentsPath = this.getAttachmentStorePath(); - File sourceFile = new File(databasePath); - File destFile = new File(path); - FileDirUtils.copyFile(sourceFile, destFile); - File attachmentsFile = new File(dstAttachmentsPath); - FileDirUtils.deleteRecursive(attachmentsFile); - attachmentsFile.mkdirs(); - if(attachmentsPath != null) { - FileDirUtils.copyFolder(new File(attachmentsPath), attachmentsFile); - } - return true; - } - - public boolean initialize(String statements) { - try { - for (String statement : statements.split(";")) { - database.execSQL(statement); - } - } catch (SQLException e) { - close(); - return false; - } - return true; - } - - public boolean open() { - if(open) { - return true; - } - - try { - database = SQLiteDatabase.openDatabase(path, null, SQLiteDatabase.CREATE_IF_NECESSARY); - TDCollateJSON.registerCustomCollators(database); - } - catch(SQLiteException e) { - Log.e(TDDatabase.TAG, "Error opening", e); - return false; - } - - // Stuff we need to initialize every time the database opens: - if(!initialize("PRAGMA foreign_keys = ON;")) { - Log.e(TDDatabase.TAG, "Error turning on foreign keys"); - return false; - } - - // Check the user_version number we last stored in the database: - int dbVersion = database.getVersion(); - - // Incompatible version changes increment the hundreds' place: - if(dbVersion >= 100) { - Log.w(TDDatabase.TAG, "TDDatabase: Database version (" + dbVersion + ") is newer than I know how to work with"); - database.close(); - return false; - } - - if(dbVersion < 1) { - // First-time initialization: - // (Note: Declaring revs.sequence as AUTOINCREMENT means the values will always be - // monotonically increasing, never reused. 
See ) - if(!initialize(SCHEMA)) { - database.close(); - return false; - } - dbVersion = 3; - } - - if (dbVersion < 2) { - // Version 2: added attachments.revpos - String upgradeSql = "ALTER TABLE attachments ADD COLUMN revpos INTEGER DEFAULT 0; " + - "PRAGMA user_version = 2"; - if(!initialize(upgradeSql)) { - database.close(); - return false; - } - dbVersion = 2; - } - - if (dbVersion < 3) { - String upgradeSql = "CREATE TABLE localdocs ( " + - "docid TEXT UNIQUE NOT NULL, " + - "revid TEXT NOT NULL, " + - "json BLOB); " + - "CREATE INDEX localdocs_by_docid ON localdocs(docid); " + - "PRAGMA user_version = 3"; - if(!initialize(upgradeSql)) { - database.close(); - return false; - } - dbVersion = 3; - } - - if (dbVersion < 4) { - String upgradeSql = "CREATE TABLE info ( " + - "key TEXT PRIMARY KEY, " + - "value TEXT); " + - "INSERT INTO INFO (key, value) VALUES ('privateUUID', '" + TDMisc.TDCreateUUID() + "'); " + - "INSERT INTO INFO (key, value) VALUES ('publicUUID', '" + TDMisc.TDCreateUUID() + "'); " + - "PRAGMA user_version = 4"; - if(!initialize(upgradeSql)) { - database.close(); - return false; - } - } - - try { - attachments = new TDBlobStore(getAttachmentStorePath()); - } catch (IllegalArgumentException e) { - Log.e(TDDatabase.TAG, "Could not initialize attachment store", e); - database.close(); - return false; - } - - open = true; - return true; - } - - public boolean close() { - if(!open) { - return false; - } - - if(views != null) { - for (TDView view : views.values()) { - view.databaseClosing(); - } - } - views = null; - - if(activeReplicators != null) { - for(TDReplicator replicator : activeReplicators) { - replicator.databaseClosing(); - } - activeReplicators = null; - } - - if(database != null && database.isOpen()) { - database.close(); - } - open = false; - transactionLevel = 0; - return true; - } - - public boolean deleteDatabase() { - if(open) { - if(!close()) { - return false; - } - } - else if(!exists()) { - return true; - } - File file = new File(path); - File attachmentsFile = new File(getAttachmentStorePath()); - - boolean deleteStatus = file.delete(); - //recursively delete attachments path - boolean deleteAttachmentStatus = FileDirUtils.deleteRecursive(attachmentsFile); - return deleteStatus && deleteAttachmentStatus; - } - - public String getPath() { - return path; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - // Leave this package protected, so it can only be used - // TDView uses this accessor - SQLiteDatabase getDatabase() { - return database; - } - - public TDBlobStore getAttachments() { - return attachments; - } - - public long totalDataSize() { - File f = new File(path); - long size = f.length() + attachments.totalDataSize(); - return size; - } - - /** - * Begins a database transaction. Transactions can nest. - * Every beginTransaction() must be balanced by a later endTransaction() - */ - public boolean beginTransaction() { - try { - database.beginTransaction(); - ++transactionLevel; - //Log.v(TAG, "Begin transaction (level " + Integer.toString(transactionLevel) + ")..."); - } catch (SQLException e) { - return false; - } - return true; - } - - /** - * Commits or aborts (rolls back) a transaction. - * - * @param commit If true, commits; if false, aborts and rolls back, undoing all changes made since the matching -beginTransaction call, *including* any committed nested transactions. 
- */ - public boolean endTransaction(boolean commit) { - assert(transactionLevel > 0); - - if(commit) { - //Log.v(TAG, "Committing transaction (level " + Integer.toString(transactionLevel) + ")..."); - database.setTransactionSuccessful(); - database.endTransaction(); - } - else { - Log.v(TAG, "CANCEL transaction (level " + Integer.toString(transactionLevel) + ")..."); - try { - database.endTransaction(); - } catch (SQLException e) { - return false; - } - } - - --transactionLevel; - return true; - } - - /** - * Compacts the database storage by removing the bodies and attachments of obsolete revisions. - */ - public TDStatus compact() { - // Can't delete any rows because that would lose revision tree history. - // But we can remove the JSON of non-current revisions, which is most of the space. - try { - Log.v(TDDatabase.TAG, "Deleting JSON of old revisions..."); - ContentValues args = new ContentValues(); - args.put("json", (String)null); - database.update("revs", args, "current=0", null); - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error compacting", e); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - Log.v(TDDatabase.TAG, "Deleting old attachments..."); - TDStatus result = garbageCollectAttachments(); - - Log.v(TDDatabase.TAG, "Vacuuming SQLite database..."); - try { - database.execSQL("VACUUM"); - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error vacuuming database", e); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - return result; - } - - public String privateUUID() { - String result = null; - Cursor cursor = null; - try { - cursor = database.rawQuery("SELECT value FROM info WHERE key='privateUUID'", null); - if(cursor.moveToFirst()) { - result = cursor.getString(0); - } - } catch(SQLException e) { - Log.e(TAG, "Error querying privateUUID", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - return result; - } - - public String publicUUID() { - String result = null; - Cursor cursor = null; - try { - cursor = database.rawQuery("SELECT value FROM info WHERE key='publicUUID'", null); - if(cursor.moveToFirst()) { - result = cursor.getString(0); - } - } catch(SQLException e) { - Log.e(TAG, "Error querying privateUUID", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - return result; - } - - /** GETTING DOCUMENTS: **/ - - public int getDocumentCount() { - String sql = "SELECT COUNT(DISTINCT doc_id) FROM revs WHERE current=1 AND deleted=0"; - Cursor cursor = null; - int result = 0; - try { - cursor = database.rawQuery(sql, null); - if(cursor.moveToFirst()) { - result = cursor.getInt(0); - } - } catch(SQLException e) { - Log.e(TDDatabase.TAG, "Error getting document count", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - public long getLastSequence() { - String sql = "SELECT MAX(sequence) FROM revs"; - Cursor cursor = null; - long result = 0; - try { - cursor = database.rawQuery(sql, null); - if(cursor.moveToFirst()) { - result = cursor.getLong(0); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting last sequence", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - return result; - } - - /** Splices the contents of an NSDictionary into JSON data (that already represents a dict), without parsing the JSON. 
*/ - public byte[] appendDictToJSON(byte[] json, Map dict) { - if(dict.size() == 0) { - return json; - } - - byte[] extraJSON = null; - try { - extraJSON = TDServer.getObjectMapper().writeValueAsBytes(dict); - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error convert extra JSON to bytes", e); - return null; - } - - int jsonLength = json.length; - int extraLength = extraJSON.length; - if(jsonLength == 2) { // Original JSON was empty - return extraJSON; - } - byte[] newJson = new byte[jsonLength + extraLength - 1]; - System.arraycopy(json, 0, newJson, 0, jsonLength - 1); // Copy json w/o trailing '}' - newJson[jsonLength - 1] = ','; // Add a ',' - System.arraycopy(extraJSON, 1, newJson, jsonLength, extraLength - 1); - return newJson; - } - - /** Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev. - Rev must already have its revID and sequence properties set. */ - public Map extraPropertiesForRevision(TDRevision rev, EnumSet contentOptions) { - - String docId = rev.getDocId(); - String revId = rev.getRevId(); - long sequenceNumber = rev.getSequence(); - assert(revId != null); - assert(sequenceNumber > 0); - - // Get attachment metadata, and optionally the contents: - boolean withAttachments = contentOptions.contains(TDContentOptions.TDIncludeAttachments); - Map attachmentsDict = getAttachmentsDictForSequenceWithContent(sequenceNumber, withAttachments); - - // Get more optional stuff to put in the properties: - //OPT: This probably ends up making redundant SQL queries if multiple options are enabled. - Long localSeq = null; - if(contentOptions.contains(TDContentOptions.TDIncludeLocalSeq)) { - localSeq = sequenceNumber; - } - - Map revHistory = null; - if(contentOptions.contains(TDContentOptions.TDIncludeRevs)) { - revHistory = getRevisionHistoryDict(rev); - } - - List revsInfo = null; - if(contentOptions.contains(TDContentOptions.TDIncludeRevsInfo)) { - revsInfo = new ArrayList(); - List revHistoryFull = getRevisionHistory(rev); - for (TDRevision historicalRev : revHistoryFull) { - Map revHistoryItem = new HashMap(); - String status = "available"; - if(historicalRev.isDeleted()) { - status = "deleted"; - } - // TODO: Detect missing revisions, set status="missing" - revHistoryItem.put("rev", historicalRev.getRevId()); - revHistoryItem.put("status", status); - revsInfo.add(revHistoryItem); - } - } - - List conflicts = null; - if(contentOptions.contains(TDContentOptions.TDIncludeConflicts)) { - TDRevisionList revs = getAllRevisionsOfDocumentID(docId, true); - if(revs.size() > 1) { - conflicts = new ArrayList(); - for (TDRevision historicalRev : revs) { - if(!historicalRev.equals(rev)) { - conflicts.add(historicalRev.getRevId()); - } - } - } - } - - Map result = new HashMap(); - result.put("_id", docId); - result.put("_rev", revId); - if(rev.isDeleted()) { - result.put("_deleted", true); - } - if(attachmentsDict != null) { - result.put("_attachments", attachmentsDict); - } - if(localSeq != null) { - result.put("_local_seq", localSeq); - } - if(revHistory != null) { - result.put("_revisions", revHistory); - } - if(revsInfo != null) { - result.put("_revs_info", revsInfo); - } - if(conflicts != null) { - result.put("_conflicts", conflicts); - } - - return result; - } - - /** Inserts the _id, _rev and _attachments properties into the JSON data and stores it in rev. - Rev must already have its revID and sequence properties set. 
*/ - public void expandStoredJSONIntoRevisionWithAttachments(byte[] json, TDRevision rev, EnumSet contentOptions) { - Map extra = extraPropertiesForRevision(rev, contentOptions); - if(json != null) { - rev.setJson(appendDictToJSON(json, extra)); - } - else { - rev.setProperties(extra); - } - } - - @SuppressWarnings("unchecked") - public Map documentPropertiesFromJSON(byte[] json, String docId, String revId, long sequence, EnumSet contentOptions) { - - TDRevision rev = new TDRevision(docId, revId, false); - rev.setSequence(sequence); - Map extra = extraPropertiesForRevision(rev, contentOptions); - if(json == null) { - return extra; - } - - Map docProperties = null; - try { - docProperties = TDServer.getObjectMapper().readValue(json, Map.class); - docProperties.putAll(extra); - return docProperties; - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error serializing properties to JSON", e); - } - - return docProperties; - } - - public TDRevision getDocumentWithIDAndRev(String id, String rev, EnumSet contentOptions) { - TDRevision result = null; - String sql; - - Cursor cursor = null; - try { - cursor = null; - String cols = "revid, deleted, sequence"; - if(!contentOptions.contains(TDContentOptions.TDNoBody)) { - cols += ", json"; - } - if(rev != null) { - sql = "SELECT " + cols + " FROM revs, docs WHERE docs.docid=? AND revs.doc_id=docs.doc_id AND revid=? LIMIT 1"; - String[] args = {id, rev}; - cursor = database.rawQuery(sql, args); - } - else { - sql = "SELECT " + cols + " FROM revs, docs WHERE docs.docid=? AND revs.doc_id=docs.doc_id and current=1 and deleted=0 ORDER BY revid DESC LIMIT 1"; - String[] args = {id}; - cursor = database.rawQuery(sql, args); - } - - if(cursor.moveToFirst()) { - if(rev == null) { - rev = cursor.getString(0); - } - boolean deleted = (cursor.getInt(1) > 0); - result = new TDRevision(id, rev, deleted); - result.setSequence(cursor.getLong(2)); - if(!contentOptions.equals(EnumSet.of(TDContentOptions.TDNoBody))) { - byte[] json = null; - if(!contentOptions.contains(TDContentOptions.TDNoBody)) { - json = cursor.getBlob(3); - } - expandStoredJSONIntoRevisionWithAttachments(json, result, contentOptions); - } - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting document with id and rev", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - return result; - } - - public boolean existsDocumentWithIDAndRev(String docId, String revId) { - return getDocumentWithIDAndRev(docId, revId, EnumSet.of(TDContentOptions.TDNoBody)) != null; - } - - public TDStatus loadRevisionBody(TDRevision rev, EnumSet contentOptions) { - if(rev.getBody() != null) { - return new TDStatus(TDStatus.OK); - } - assert((rev.getDocId() != null) && (rev.getRevId() != null)); - - Cursor cursor = null; - TDStatus result = new TDStatus(TDStatus.NOT_FOUND); - try { - String sql = "SELECT sequence, json FROM revs, docs WHERE revid=? AND docs.docid=? 
AND revs.doc_id=docs.doc_id LIMIT 1"; - String[] args = { rev.getRevId(), rev.getDocId()}; - cursor = database.rawQuery(sql, args); - if(cursor.moveToFirst()) { - result.setCode(TDStatus.OK); - rev.setSequence(cursor.getLong(0)); - expandStoredJSONIntoRevisionWithAttachments(cursor.getBlob(1), rev, contentOptions); - } - } catch(SQLException e) { - Log.e(TDDatabase.TAG, "Error loading revision body", e); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } finally { - if(cursor != null) { - cursor.close(); - } - } - return result; - } - - public long getDocNumericID(String docId) { - Cursor cursor = null; - String[] args = { docId }; - - long result = -1; - try { - cursor = database.rawQuery("SELECT doc_id FROM docs WHERE docid=?", args); - - if(cursor.moveToFirst()) { - result = cursor.getLong(0); - } - else { - result = 0; - } - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error getting doc numeric id", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - /** HISTORY: **/ - - /** - * Returns all the known revisions (or all current/conflicting revisions) of a document. - */ - public TDRevisionList getAllRevisionsOfDocumentID(String docId, long docNumericID, boolean onlyCurrent) { - - String sql = null; - if(onlyCurrent) { - sql = "SELECT sequence, revid, deleted FROM revs " + - "WHERE doc_id=? AND current ORDER BY sequence DESC"; - } - else { - sql = "SELECT sequence, revid, deleted FROM revs " + - "WHERE doc_id=? ORDER BY sequence DESC"; - } - - String[] args = { Long.toString(docNumericID) }; - Cursor cursor = null; - - cursor = database.rawQuery(sql, args); - - TDRevisionList result; - try { - cursor.moveToFirst(); - result = new TDRevisionList(); - while(!cursor.isAfterLast()) { - TDRevision rev = new TDRevision(docId, cursor.getString(1), (cursor.getInt(2) > 0)); - rev.setSequence(cursor.getLong(0)); - result.add(rev); - cursor.moveToNext(); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting all revisions of document", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - public TDRevisionList getAllRevisionsOfDocumentID(String docId, boolean onlyCurrent) { - long docNumericId = getDocNumericID(docId); - if(docNumericId < 0) { - return null; - } - else if(docNumericId == 0) { - return new TDRevisionList(); - } - else { - return getAllRevisionsOfDocumentID(docId, docNumericId, onlyCurrent); - } - } - - public List getConflictingRevisionIDsOfDocID(String docID) { - long docIdNumeric = getDocNumericID(docID); - if(docIdNumeric < 0) { - return null; - } - - List result = new ArrayList(); - Cursor cursor = null; - try { - String[] args = { Long.toString(docIdNumeric) }; - cursor = database.rawQuery("SELECT revid FROM revs WHERE doc_id=? AND current " + - "ORDER BY revid DESC OFFSET 1", args); - cursor.moveToFirst(); - while(!cursor.isAfterLast()) { - result.add(cursor.getString(0)); - cursor.moveToNext(); - } - - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting all revisions of document", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - public String findCommonAncestorOf(TDRevision rev, List revIDs) { - String result = null; - - if (revIDs.size() == 0) - return null; - String docId = rev.getDocId(); - long docNumericID = getDocNumericID(docId); - if (docNumericID <= 0) - return null; - String quotedRevIds = joinQuoted(revIDs); - String sql = "SELECT revid FROM revs " + - "WHERE doc_id=? 
and revid in (" + quotedRevIds + ") and revid <= ? " + - "ORDER BY revid DESC LIMIT 1"; - String[] args = { Long.toString(docNumericID) }; - - Cursor cursor = null; - try { - cursor = database.rawQuery(sql, args); - cursor.moveToFirst(); - if(!cursor.isAfterLast()) { - result = cursor.getString(0); - } - - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting all revisions of document", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - /** - * Returns an array of TDRevs in reverse chronological order, starting with the given revision. - */ - public List getRevisionHistory(TDRevision rev) { - String docId = rev.getDocId(); - String revId = rev.getRevId(); - assert((docId != null) && (revId != null)); - - long docNumericId = getDocNumericID(docId); - if(docNumericId < 0) { - return null; - } - else if(docNumericId == 0) { - return new ArrayList(); - } - - String sql = "SELECT sequence, parent, revid, deleted FROM revs " + - "WHERE doc_id=? ORDER BY sequence DESC"; - String[] args = { Long.toString(docNumericId) }; - Cursor cursor = null; - - List result; - try { - cursor = database.rawQuery(sql, args); - - cursor.moveToFirst(); - long lastSequence = 0; - result = new ArrayList(); - while(!cursor.isAfterLast()) { - long sequence = cursor.getLong(0); - boolean matches = false; - if(lastSequence == 0) { - matches = revId.equals(cursor.getString(2)); - } - else { - matches = (sequence == lastSequence); - } - if(matches) { - revId = cursor.getString(2); - boolean deleted = (cursor.getInt(3) > 0); - TDRevision aRev = new TDRevision(docId, revId, deleted); - aRev.setSequence(cursor.getLong(0)); - result.add(aRev); - lastSequence = cursor.getLong(1); - if(lastSequence == 0) { - break; - } - } - cursor.moveToNext(); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting revision history", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - // Splits a revision ID into its generation number and opaque suffix string - public static int parseRevIDNumber(String rev) { - int result = -1; - int dashPos = rev.indexOf("-"); - if(dashPos >= 0) { - try { - result = Integer.parseInt(rev.substring(0, dashPos)); - } catch (NumberFormatException e) { - // ignore, let it return -1 - } - } - return result; - } - - // Splits a revision ID into its generation number and opaque suffix string - public static String parseRevIDSuffix(String rev) { - String result = null; - int dashPos = rev.indexOf("-"); - if(dashPos >= 0) { - result = rev.substring(dashPos + 1); - } - return result; - } - - public static Map makeRevisionHistoryDict(List history) { - if(history == null) { - return null; - } - - // Try to extract descending numeric prefixes: - List suffixes = new ArrayList(); - int start = -1; - int lastRevNo = -1; - for (TDRevision rev : history) { - int revNo = parseRevIDNumber(rev.getRevId()); - String suffix = parseRevIDSuffix(rev.getRevId()); - if(revNo > 0 && suffix.length() > 0) { - if(start < 0) { - start = revNo; - } - else if(revNo != lastRevNo - 1) { - start = -1; - break; - } - lastRevNo = revNo; - suffixes.add(suffix); - } - else { - start = -1; - break; - } - } - - Map result = new HashMap(); - if(start == -1) { - // we failed to build sequence, just stuff all the revs in list - suffixes = new ArrayList(); - for (TDRevision rev : history) { - suffixes.add(rev.getRevId()); - } - } - else { - result.put("start", start); - } - result.put("ids", suffixes); - - return result; - } - - 
/** - * Returns the revision history as a _revisions dictionary, as returned by the REST API's ?revs=true option. - */ - public Map getRevisionHistoryDict(TDRevision rev) { - return makeRevisionHistoryDict(getRevisionHistory(rev)); - } - - public TDRevisionList changesSince(long lastSeq, TDChangesOptions options, TDFilterBlock filter) { - // http://wiki.apache.org/couchdb/HTTP_database_API#Changes - if(options == null) { - options = new TDChangesOptions(); - } - - boolean includeDocs = options.isIncludeDocs() || (filter != null); - String additionalSelectColumns = ""; - if(includeDocs) { - additionalSelectColumns = ", json"; - } - - String sql = "SELECT sequence, revs.doc_id, docid, revid, deleted" + additionalSelectColumns + " FROM revs, docs " - + "WHERE sequence > ? AND current=1 " - + "AND revs.doc_id = docs.doc_id " - + "ORDER BY revs.doc_id, revid DESC"; - String[] args = {Long.toString(lastSeq)}; - Cursor cursor = null; - TDRevisionList changes = null; - - try { - cursor = database.rawQuery(sql, args); - cursor.moveToFirst(); - changes = new TDRevisionList(); - long lastDocId = 0; - while(!cursor.isAfterLast()) { - if(!options.isIncludeConflicts()) { - // Only count the first rev for a given doc (the rest will be losing conflicts): - long docNumericId = cursor.getLong(1); - if(docNumericId == lastDocId) { - cursor.moveToNext(); - continue; - } - lastDocId = docNumericId; - } - - TDRevision rev = new TDRevision(cursor.getString(2), cursor.getString(3), (cursor.getInt(4) > 0)); - rev.setSequence(cursor.getLong(0)); - if(includeDocs) { - expandStoredJSONIntoRevisionWithAttachments(cursor.getBlob(5), rev, options.getContentOptions()); - } - if((filter == null) || (filter.filter(rev))) { - changes.add(rev); - } - cursor.moveToNext(); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error looking for changes", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - - if(options.isSortBySequence()) { - changes.sortBySequence(); - } - changes.limit(options.getLimit()); - return changes; - } - - /** - * Define or clear a named filter function. - * - * These aren't used directly by TDDatabase, but they're looked up by TDRouter when a _changes request has a ?filter parameter. 
- */ - public void defineFilter(String filterName, TDFilterBlock filter) { - if(filters == null) { - filters = new HashMap(); - } - filters.put(filterName, filter); - } - - public TDFilterBlock getFilterNamed(String filterName) { - TDFilterBlock result = null; - if(filters != null) { - result = filters.get(filterName); - } - return result; - } - - /** VIEWS: **/ - - public TDView registerView(TDView view) { - if(view == null) { - return null; - } - if(views == null) { - views = new HashMap(); - } - views.put(view.getName(), view); - return view; - } - - public TDView getViewNamed(String name) { - TDView view = null; - if(views != null) { - view = views.get(name); - } - if(view != null) { - return view; - } - return registerView(new TDView(this, name)); - } - - public TDView getExistingViewNamed(String name) { - TDView view = null; - if(views != null) { - view = views.get(name); - } - if(view != null) { - return view; - } - view = new TDView(this, name); - if(view.getViewId() == 0) { - return null; - } - - return registerView(view); - } - - public List getAllViews() { - Cursor cursor = null; - List result = null; - - try { - cursor = database.rawQuery("SELECT name FROM views", null); - cursor.moveToFirst(); - result = new ArrayList(); - while(!cursor.isAfterLast()) { - result.add(getViewNamed(cursor.getString(0))); - cursor.moveToNext(); - } - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error getting all views", e); - } finally { - if(cursor != null) { - cursor.close(); - } - } - - return result; - } - - public TDStatus deleteViewNamed(String name) { - TDStatus result = new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - try { - String[] whereArgs = { name }; - int rowsAffected = database.delete("views", "name=?", whereArgs); - if(rowsAffected > 0) { - result.setCode(TDStatus.OK); - } - else { - result.setCode(TDStatus.NOT_FOUND); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error deleting view", e); - } - return result; - } - - //FIX: This has a lot of code in common with -[TDView queryWithOptions:status:]. Unify the two! 
- public Map getDocsWithIDs(List docIDs, TDQueryOptions options) { - if(options == null) { - options = new TDQueryOptions(); - } - - long updateSeq = 0; - if(options.isUpdateSeq()) { - updateSeq = getLastSequence(); // TODO: needs to be atomic with the following SELECT - } - - // Generate the SELECT statement, based on the options: - String additionalCols = ""; - if(options.isIncludeDocs()) { - additionalCols = ", json, sequence"; - } - String sql = "SELECT revs.doc_id, docid, revid, deleted" + additionalCols + " FROM revs, docs WHERE"; - - if(docIDs != null) { - sql += " docid IN (" + joinQuoted(docIDs) + ")"; - } else { - sql += " deleted=0"; - } - - sql += " AND current=1 AND docs.doc_id = revs.doc_id"; - - List argsList = new ArrayList(); - Object minKey = options.getStartKey(); - Object maxKey = options.getEndKey(); - boolean inclusiveMin = true; - boolean inclusiveMax = options.isInclusiveEnd(); - if(options.isDescending()) { - minKey = maxKey; - maxKey = options.getStartKey(); - inclusiveMin = inclusiveMax; - inclusiveMax = true; - } - - if(minKey != null) { - assert(minKey instanceof String); - if(inclusiveMin) { - sql += " AND docid >= ?"; - } else { - sql += " AND docid > ?"; - } - argsList.add((String)minKey); - } - - if(maxKey != null) { - assert(maxKey instanceof String); - if(inclusiveMax) { - sql += " AND docid <= ?"; - } - else { - sql += " AND docid < ?"; - } - argsList.add((String)maxKey); - } - - - String order = "ASC"; - if(options.isDescending()) { - order = "DESC"; - } - - sql += " ORDER BY docid " + order + ", revid DESC LIMIT ? OFFSET ?"; - - argsList.add(Integer.toString(options.getLimit())); - argsList.add(Integer.toString(options.getSkip())); - Cursor cursor = null; - long lastDocID = 0; - List> rows = null; - - try { - cursor = database.rawQuery(sql, argsList.toArray(new String[argsList.size()])); - - cursor.moveToFirst(); - rows = new ArrayList>(); - while(!cursor.isAfterLast()) { - long docNumericID = cursor.getLong(0); - if(docNumericID == lastDocID) { - cursor.moveToNext(); - continue; - } - lastDocID = docNumericID; - - String docId = cursor.getString(1); - String revId = cursor.getString(2); - Map docContents = null; - boolean deleted = cursor.getInt(3) > 0; - if(options.isIncludeDocs() && !deleted) { - byte[] json = cursor.getBlob(4); - long sequence = cursor.getLong(5); - docContents = documentPropertiesFromJSON(json, docId, revId, sequence, options.getContentOptions()); - } - - Map valueMap = new HashMap(); - valueMap.put("rev", revId); - - Map change = new HashMap(); - change.put("id", docId); - change.put("key", docId); - change.put("value", valueMap); - if(docContents != null) { - change.put("doc", docContents); - } - if(deleted) { - change.put("deleted", true); - } - - rows.add(change); - - cursor.moveToNext(); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting all docs", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - - int totalRows = cursor.getCount(); //??? Is this true, or does it ignore limit/offset? 
- Map result = new HashMap(); - result.put("rows", rows); - result.put("total_rows", totalRows); - result.put("offset", options.getSkip()); - if(updateSeq != 0) { - result.put("update_seq", updateSeq); - } - - - return result; - } - - public Map getAllDocs(TDQueryOptions options) { - return getDocsWithIDs(null, options); - } - - /*************************************************************************************************/ - /*** TDDatabase+Attachments ***/ - /*************************************************************************************************/ - - public TDStatus insertAttachmentForSequenceWithNameAndType(InputStream contentStream, long sequence, String name, String contentType, int revpos) { - assert(sequence > 0); - assert(name != null); - - TDBlobKey key = new TDBlobKey(); - if(!attachments.storeBlobStream(contentStream, key)) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - byte[] keyData = key.getBytes(); - try { - ContentValues args = new ContentValues(); - args.put("sequence", sequence); - args.put("filename", name); - args.put("key", keyData); - args.put("type", contentType); - args.put("length", attachments.getSizeOfBlob(key)); - args.put("revpos", revpos); - database.insert("attachments", null, args); - return new TDStatus(TDStatus.CREATED); - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error inserting attachment", e); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - } - - public TDStatus copyAttachmentNamedFromSequenceToSequence(String name, long fromSeq, long toSeq) { - assert(name != null); - assert(toSeq > 0); - if(fromSeq < 0) { - return new TDStatus(TDStatus.NOT_FOUND); - } - - Cursor cursor = null; - - String[] args = { Long.toString(toSeq), name, Long.toString(fromSeq), name }; - try { - database.execSQL("INSERT INTO attachments (sequence, filename, key, type, length, revpos) " + - "SELECT ?, ?, key, type, length, revpos FROM attachments " + - "WHERE sequence=? AND filename=?", args); - cursor = database.rawQuery("SELECT changes()", null); - cursor.moveToFirst(); - int rowsUpdated = cursor.getInt(0); - if(rowsUpdated == 0) { - // Oops. This means a glitch in our attachment-management or pull code, - // or else a bug in the upstream server. - Log.w(TDDatabase.TAG, "Can't find inherited attachment " + name + " from seq# " + Long.toString(fromSeq) + " to copy to " + Long.toString(toSeq)); - return new TDStatus(TDStatus.NOT_FOUND); - } - else { - return new TDStatus(TDStatus.OK); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error copying attachment", e); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } finally { - if(cursor != null) { - cursor.close(); - } - } - } - - /** - * Returns the content and MIME type of an attachment - */ - public TDAttachment getAttachmentForSequence(long sequence, String filename, TDStatus status) { - assert(sequence > 0); - assert(filename != null); - - - Cursor cursor = null; - - String[] args = { Long.toString(sequence), filename }; - try { - cursor = database.rawQuery("SELECT key, type FROM attachments WHERE sequence=? AND filename=?", args); - - if(!cursor.moveToFirst()) { - status.setCode(TDStatus.NOT_FOUND); - return null; - } - - byte[] keyData = cursor.getBlob(0); - //TODO add checks on key here? 
(ios version) - TDBlobKey key = new TDBlobKey(keyData); - InputStream contentStream = attachments.blobStreamForKey(key); - if(contentStream == null) { - Log.e(TDDatabase.TAG, "Failed to load attachment"); - status.setCode(TDStatus.INTERNAL_SERVER_ERROR); - return null; - } - else { - status.setCode(TDStatus.OK); - TDAttachment result = new TDAttachment(); - result.setContentStream(contentStream); - result.setContentType(cursor.getString(1)); - return result; - } - - - } catch (SQLException e) { - status.setCode(TDStatus.INTERNAL_SERVER_ERROR); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - - } - - /** - * Constructs an "_attachments" dictionary for a revision, to be inserted in its JSON body. - */ - public Map getAttachmentsDictForSequenceWithContent(long sequence, boolean withContent) { - assert(sequence > 0); - - Cursor cursor = null; - - String args[] = { Long.toString(sequence) }; - try { - cursor = database.rawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", args); - - if(!cursor.moveToFirst()) { - return null; - } - - Map result = new HashMap(); - - while(!cursor.isAfterLast()) { - - byte[] keyData = cursor.getBlob(1); - TDBlobKey key = new TDBlobKey(keyData); - String digestString = "sha1-" + Base64.encodeBytes(keyData); - String dataBase64 = null; - if(withContent) { - byte[] data = attachments.blobForKey(key); - if(data != null) { - dataBase64 = Base64.encodeBytes(data); - } - else { - Log.w(TDDatabase.TAG, "Error loading attachment"); - } - } - - Map attachment = new HashMap(); - if(dataBase64 == null) { - attachment.put("stub", true); - } - else { - attachment.put("data", dataBase64); - } - attachment.put("digest", digestString); - attachment.put("content_type", cursor.getString(2)); - attachment.put("length", cursor.getInt(3)); - attachment.put("revpos", cursor.getInt(4)); - - result.put(cursor.getString(0), attachment); - - cursor.moveToNext(); - } - - return result; - - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting attachments for sequence", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - } - - /** - * Modifies a TDRevision's body by changing all attachments with revpos < minRevPos into stubs. - * - * @param rev - * @param minRevPos - */ - public void stubOutAttachmentsIn(TDRevision rev, int minRevPos) - { - if (minRevPos <= 1) { - return; - } - Map properties = (Map)rev.getProperties(); - Map attachments = null; - if(properties != null) { - attachments = (Map)properties.get("_attachments"); - } - Map editedProperties = null; - Map editedAttachments = null; - for (String name : attachments.keySet()) { - Map attachment = (Map)attachments.get(name); - int revPos = (Integer) attachment.get("revpos"); - Object stub = attachment.get("stub"); - if (revPos > 0 && revPos < minRevPos && (stub == null)) { - // Strip this attachment's body. 
First make its dictionary mutable: - if (editedProperties == null) { - editedProperties = new HashMap(properties); - editedAttachments = new HashMap(attachments); - editedProperties.put("_attachments", editedAttachments); - } - // ...then remove the 'data' and 'follows' key: - Map editedAttachment = new HashMap(attachment); - editedAttachment.remove("data"); - editedAttachment.remove("follows"); - editedAttachment.put("stub", true); - editedAttachments.put(name,editedAttachment); - Log.d(TDDatabase.TAG, "Stubbed out attachment" + rev + " " + name + ": revpos" + revPos + " " + minRevPos); - } - } - if (editedProperties != null) - rev.setProperties(editedProperties); - } - - /** - * Given a newly-added revision, adds the necessary attachment rows to the database and stores inline attachments into the blob store. - */ - public TDStatus processAttachmentsForRevision(TDRevision rev, long parentSequence) { - assert(rev != null); - long newSequence = rev.getSequence(); - assert(newSequence > parentSequence); - - // If there are no attachments in the new rev, there's nothing to do: - Map newAttachments = null; - Map properties = (Map)rev.getProperties(); - if(properties != null) { - newAttachments = (Map)properties.get("_attachments"); - } - if(newAttachments == null || newAttachments.size() == 0 || rev.isDeleted()) { - return new TDStatus(TDStatus.OK); - } - - for (String name : newAttachments.keySet()) { - - TDStatus status = new TDStatus(); - Map newAttach = (Map)newAttachments.get(name); - String newContentBase64 = (String)newAttach.get("data"); - if(newContentBase64 != null) { - // New item contains data, so insert it. First decode the data: - byte[] newContents; - try { - newContents = Base64.decode(newContentBase64); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "IOExeption parsing base64", e); - return new TDStatus(TDStatus.BAD_REQUEST); - } - if(newContents == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - // Now determine the revpos, i.e. generation # this was added in. Usually this is - // implicit, but a rev being pulled in replication will have it set already. - int generation = rev.getGeneration(); - assert(generation > 0); - Object revposObj = newAttach.get("revpos"); - int revpos = generation; - if(revposObj != null && revposObj instanceof Integer) { - revpos = ((Integer)revposObj).intValue(); - } - - if(revpos > generation) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - // Finally insert the attachment: - status = insertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(newContents), newSequence, name, (String)newAttach.get("content_type"), revpos); - } - else { - // It's just a stub, so copy the previous revision's attachment entry: - //? Should I enforce that the type and digest (if any) match? - status = copyAttachmentNamedFromSequenceToSequence(name, parentSequence, newSequence); - } - if(!status.isSuccessful()) { - return status; - } - } - - return new TDStatus(TDStatus.OK); - } - - /** - * Updates or deletes an attachment, creating a new document revision in the process. - * Used by the PUT / DELETE methods called on attachment URLs. 
- */ - public TDRevision updateAttachment(String filename, InputStream contentStream, String contentType, String docID, String oldRevID, TDStatus status) { - status.setCode(TDStatus.BAD_REQUEST); - if(filename == null || filename.length() == 0 || (contentStream != null && contentType == null) || (oldRevID != null && docID == null) || (contentStream != null && docID == null)) { - return null; - } - - beginTransaction(); - try { - TDRevision oldRev = new TDRevision(docID, oldRevID, false); - if(oldRevID != null) { - // Load existing revision if this is a replacement: - TDStatus loadStatus = loadRevisionBody(oldRev, EnumSet.noneOf(TDContentOptions.class)); - status.setCode(loadStatus.getCode()); - if(!status.isSuccessful()) { - if(status.getCode() == TDStatus.NOT_FOUND && existsDocumentWithIDAndRev(docID, null)) { - status.setCode(TDStatus.CONFLICT); // if some other revision exists, it's a conflict - } - return null; - } - - Map attachments = (Map) oldRev.getProperties().get("_attachments"); - if(contentStream == null && attachments != null && !attachments.containsKey(filename)) { - status.setCode(TDStatus.NOT_FOUND); - return null; - } - // Remove the _attachments stubs so putRevision: doesn't copy the rows for me - // OPT: Would be better if I could tell loadRevisionBody: not to add it - if(attachments != null) { - Map properties = new HashMap(oldRev.getProperties()); - properties.remove("_attachments"); - oldRev.setBody(new TDBody(properties)); - } - } else { - // If this creates a new doc, it needs a body: - oldRev.setBody(new TDBody(new HashMap())); - } - - // Create a new revision: - TDRevision newRev = putRevision(oldRev, oldRevID, false, status); - if(newRev == null) { - return null; - } - - if(oldRevID != null) { - // Copy all attachment rows _except_ for the one being updated: - String[] args = { Long.toString(newRev.getSequence()), Long.toString(oldRev.getSequence()), filename }; - database.execSQL("INSERT INTO attachments " - + "(sequence, filename, key, type, length, revpos) " - + "SELECT ?, filename, key, type, length, revpos FROM attachments " - + "WHERE sequence=? AND filename != ?", args); - } - - if(contentStream != null) { - // If not deleting, add a new attachment entry: - TDStatus insertStatus = insertAttachmentForSequenceWithNameAndType(contentStream, newRev.getSequence(), - filename, contentType, newRev.getGeneration()); - status.setCode(insertStatus.getCode()); - - if(!status.isSuccessful()) { - return null; - } - } - - status.setCode((contentStream != null) ? TDStatus.CREATED : TDStatus.OK); - return newRev; - - } catch(SQLException e) { - Log.e(TAG, "Error uploading attachment", e); - status.setCode(TDStatus.INTERNAL_SERVER_ERROR); - return null; - } finally { - endTransaction(status.isSuccessful()); - } - } - - /** - * Deletes obsolete attachments from the database and blob store. 
- */ - public TDStatus garbageCollectAttachments() { - // First delete attachment rows for already-cleared revisions: - // OPT: Could start after last sequence# we GC'd up to - - try { - database.execSQL("DELETE FROM attachments WHERE sequence IN " + - "(SELECT sequence from revs WHERE json IS null)"); - } - catch(SQLException e) { - Log.e(TDDatabase.TAG, "Error deleting attachments", e); - } - - // Now collect all remaining attachment IDs and tell the store to delete all but these: - Cursor cursor = null; - try { - cursor = database.rawQuery("SELECT DISTINCT key FROM attachments", null); - - cursor.moveToFirst(); - List allKeys = new ArrayList(); - while(!cursor.isAfterLast()) { - TDBlobKey key = new TDBlobKey(cursor.getBlob(0)); - allKeys.add(key); - cursor.moveToNext(); - } - - int numDeleted = attachments.deleteBlobsExceptWithKeys(allKeys); - if(numDeleted < 0) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - Log.v(TDDatabase.TAG, "Deleted " + numDeleted + " attachments"); - - return new TDStatus(TDStatus.OK); - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error finding attachment keys in use", e); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } finally { - if(cursor != null) { - cursor.close(); - } - } - } - - /*************************************************************************************************/ - /*** TDDatabase+Insertion ***/ - /*************************************************************************************************/ - - /** DOCUMENT & REV IDS: **/ - - public static boolean isValidDocumentId(String id) { - // http://wiki.apache.org/couchdb/HTTP_Document_API#Documents - if(id == null || id.length() == 0) { - return false; - } - if(id.charAt(0) == '_') { - return (id.startsWith("_design/")); - } - return true; - // "_local/*" is not a valid document ID. Local docs have their own API and shouldn't get here. - } - - public static String generateDocumentId() { - return TDMisc.TDCreateUUID(); - } - - public String generateNextRevisionID(String revisionId) { - // Revision IDs have a generation count, a hyphen, and a UUID. 
- int generation = 0; - if(revisionId != null) { - generation = TDRevision.generationFromRevID(revisionId); - if(generation == 0) { - return null; - } - } - String digest = TDMisc.TDCreateUUID(); //TODO: Generate canonical digest of body - return Integer.toString(generation + 1) + "-" + digest; - } - - public long insertDocumentID(String docId) { - long rowId = -1; - try { - ContentValues args = new ContentValues(); - args.put("docid", docId); - rowId = database.insert("docs", null, args); - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error inserting document id", e); - } - return rowId; - } - - public long getOrInsertDocNumericID(String docId) { - long docNumericId = getDocNumericID(docId); - if(docNumericId == 0) { - docNumericId = insertDocumentID(docId); - } - return docNumericId; - } - - /** - * Parses the _revisions dict from a document into an array of revision ID strings - */ - public static List parseCouchDBRevisionHistory(Map docProperties) { - Map revisions = (Map)docProperties.get("_revisions"); - if(revisions == null) { - return null; - } - List revIDs = (List)revisions.get("ids"); - Integer start = (Integer)revisions.get("start"); - if(start != null) { - for(int i=0; i < revIDs.size(); i++) { - String revID = revIDs.get(i); - revIDs.set(i, Integer.toString(start--) + "-" + revID); - } - } - return revIDs; - } - - /** INSERTION: **/ - - public byte[] encodeDocumentJSON(TDRevision rev) { - - Map origProps = rev.getProperties(); - if(origProps == null) { - return null; - } - - // Don't allow any "_"-prefixed keys. Known ones we'll ignore, unknown ones are an error. - Map properties = new HashMap(origProps.size()); - for (String key : origProps.keySet()) { - if(key.startsWith("_")) { - if(!KNOWN_SPECIAL_KEYS.contains(key)) { - Log.e(TAG, "TDDatabase: Invalid top-level key '" + key + "' in document to be inserted"); - return null; - } - } else { - properties.put(key, origProps.get(key)); - } - } - - byte[] json = null; - try { - json = TDServer.getObjectMapper().writeValueAsBytes(properties); - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error serializing " + rev + " to JSON", e); - } - return json; - } - - public void notifyChange(TDRevision rev, URL source) { - Map changeNotification = new HashMap(); - changeNotification.put("rev", rev); - changeNotification.put("seq", rev.getSequence()); - if(source != null) { - changeNotification.put("source", source); - } - setChanged(); - notifyObservers(changeNotification); - } - - public long insertRevision(TDRevision rev, long docNumericID, long parentSequence, boolean current, byte[] data) { - long rowId = 0; - try { - ContentValues args = new ContentValues(); - args.put("doc_id", docNumericID); - args.put("revid", rev.getRevId()); - if(parentSequence != 0) { - args.put("parent", parentSequence); - } - args.put("current", current); - args.put("deleted", rev.isDeleted()); - args.put("json", data); - rowId = database.insert("revs", null, args); - rev.setSequence(rowId); - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error inserting revision", e); - } - return rowId; - } - - private TDRevision putRevision(TDRevision rev, String prevRevId, TDStatus resultStatus) { - return putRevision(rev, prevRevId, false, resultStatus); - } - - /** - * Stores a new (or initial) revision of a document. - * - * This is what's invoked by a PUT or POST. As with those, the previous revision ID must be supplied when necessary and the call will fail if it doesn't match. - * - * @param rev The revision to add. 
If the docID is null, a new UUID will be assigned. Its revID must be null. It must have a JSON body. - * @param prevRevId The ID of the revision to replace (same as the "?rev=" parameter to a PUT), or null if this is a new document. - * @param allowConflict If false, an error status 409 will be returned if the insertion would create a conflict, i.e. if the previous revision already has a child. - * @param resultStatus On return, an HTTP status code indicating success or failure. - * @return A new TDRevision with the docID, revID and sequence filled in (but no body). - */ - @SuppressWarnings("unchecked") - public TDRevision putRevision(TDRevision rev, String prevRevId, boolean allowConflict, TDStatus resultStatus) { - // prevRevId is the rev ID being replaced, or nil if an insert - String docId = rev.getDocId(); - boolean deleted = rev.isDeleted(); - if((rev == null) || ((prevRevId != null) && (docId == null)) || (deleted && (docId == null)) - || ((docId != null) && !isValidDocumentId(docId))) { - resultStatus.setCode(TDStatus.BAD_REQUEST); - return null; - } - - resultStatus.setCode(TDStatus.INTERNAL_SERVER_ERROR); - beginTransaction(); - Cursor cursor = null; - - //// PART I: In which are performed lookups and validations prior to the insert... - - long docNumericID = (docId != null) ? getDocNumericID(docId) : 0; - long parentSequence = 0; - try { - if(prevRevId != null) { - // Replacing: make sure given prevRevID is current & find its sequence number: - if(docNumericID <= 0) { - resultStatus.setCode(TDStatus.NOT_FOUND); - return null; - } - - String[] args = {Long.toString(docNumericID), prevRevId}; - String additionalWhereClause = ""; - if(!allowConflict) { - additionalWhereClause = "AND current=1"; - } - - cursor = database.rawQuery("SELECT sequence FROM revs WHERE doc_id=? AND revid=? " + additionalWhereClause + " LIMIT 1", args); - - if(cursor.moveToFirst()) { - parentSequence = cursor.getLong(0); - } - - if(parentSequence == 0) { - // Not found: either a 404 or a 409, depending on whether there is any current revision - if(!allowConflict && existsDocumentWithIDAndRev(docId, null)) { - resultStatus.setCode(TDStatus.CONFLICT); - return null; - } - else { - resultStatus.setCode(TDStatus.NOT_FOUND); - return null; - } - } - - if(validations != null && validations.size() > 0) { - // Fetch the previous revision and validate the new one against it: - TDRevision prevRev = new TDRevision(docId, prevRevId, false); - TDStatus status = validateRevision(rev, prevRev); - if(!status.isSuccessful()) { - resultStatus.setCode(status.getCode()); - return null; - } - } - - // Make replaced rev non-current: - ContentValues updateContent = new ContentValues(); - updateContent.put("current", 0); - database.update("revs", updateContent, "sequence=" + parentSequence, null); - } - else { - // Inserting first revision. 
- if(deleted && (docId != null)) { - // Didn't specify a revision to delete: 404 or a 409, depending - if(existsDocumentWithIDAndRev(docId, null)) { - resultStatus.setCode(TDStatus.CONFLICT); - return null; - } - else { - resultStatus.setCode(TDStatus.NOT_FOUND); - return null; - } - } - - // Validate: - TDStatus status = validateRevision(rev, null); - if(!status.isSuccessful()) { - resultStatus.setCode(status.getCode()); - return null; - } - - if(docId != null) { - // Inserting first revision, with docID given (PUT): - if(docNumericID <= 0) { - // Doc doesn't exist at all; create it: - docNumericID = insertDocumentID(docId); - if(docNumericID <= 0) { - return null; - } - } else { - // Doc exists; check whether current winning revision is deleted: - String[] args = { Long.toString(docNumericID) }; - cursor = database.rawQuery("SELECT sequence, deleted FROM revs WHERE doc_id=? and current=1 ORDER BY revid DESC LIMIT 1", args); - - if(cursor.moveToFirst()) { - boolean wasAlreadyDeleted = (cursor.getInt(1) > 0); - if(wasAlreadyDeleted) { - // Make the deleted revision no longer current: - ContentValues updateContent = new ContentValues(); - updateContent.put("current", 0); - database.update("revs", updateContent, "sequence=" + cursor.getLong(0), null); - } - else if (!allowConflict) { - // docId already exists, current not deleted, conflict - resultStatus.setCode(TDStatus.CONFLICT); - return null; - } - } - } - } - else { - // Inserting first revision, with no docID given (POST): generate a unique docID: - docId = TDDatabase.generateDocumentId(); - docNumericID = insertDocumentID(docId); - if(docNumericID <= 0) { - return null; - } - } - } - - //// PART II: In which insertion occurs... - - // Bump the revID and update the JSON: - String newRevId = generateNextRevisionID(prevRevId); - byte[] data = null; - if(!rev.isDeleted()) { - data = encodeDocumentJSON(rev); - if(data == null) { - // bad or missing json - resultStatus.setCode(TDStatus.BAD_REQUEST); - return null; - } - } - - rev = rev.copyWithDocID(docId, newRevId); - - // Now insert the rev itself: - long newSequence = insertRevision(rev, docNumericID, parentSequence, true, data); - if(newSequence == 0) { - return null; - } - - // Store any attachments: - if(attachments != null) { - TDStatus status = processAttachmentsForRevision(rev, parentSequence); - if(!status.isSuccessful()) { - resultStatus.setCode(status.getCode()); - return null; - } - } - - // Success! - if(deleted) { - resultStatus.setCode(TDStatus.OK); - } - else { - resultStatus.setCode(TDStatus.CREATED); - } - - } catch (SQLException e1) { - Log.e(TDDatabase.TAG, "Error putting revision", e1); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - endTransaction(resultStatus.isSuccessful()); - } - - //// EPILOGUE: A change notification is sent... - notifyChange(rev, null); - return rev; - } - - /** - * Inserts an already-existing revision replicated from a remote database. - * - * It must already have a revision ID. This may create a conflict! The revision's history must be given; ancestor revision IDs that don't already exist locally will create phantom revisions with no content. 
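When the insertion above fails with TDStatus.CONFLICT (the 409 path), a caller would normally re-read the current winning revision and retry against its revID. A hedged sketch assuming a simple last-writer-wins policy; "update" and "knownRevId" are illustrative names:

    TDStatus st = new TDStatus();
    TDRevision saved = db.putRevision(update, knownRevId, false, st);
    if (saved == null && st.getCode() == TDStatus.CONFLICT) {
        // Fetch the current winner (no body needed) and retry once against it.
        TDRevision current = db.getDocumentWithIDAndRev(update.getDocId(), null,
                EnumSet.of(TDDatabase.TDContentOptions.TDNoBody));
        if (current != null) {
            saved = db.putRevision(update, current.getRevId(), false, st);
        }
    }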
- */ - public TDStatus forceInsert(TDRevision rev, List revHistory, URL source) { - - String docId = rev.getDocId(); - String revId = rev.getRevId(); - if(!isValidDocumentId(docId) || (revId == null)) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - int historyCount = revHistory.size(); - if(historyCount == 0) { - revHistory = new ArrayList(); - revHistory.add(revId); - historyCount = 1; - } else if(!revHistory.get(0).equals(rev.getRevId())) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - boolean success = false; - beginTransaction(); - try { - // First look up all locally-known revisions of this document: - long docNumericID = getOrInsertDocNumericID(docId); - TDRevisionList localRevs = getAllRevisionsOfDocumentID(docId, docNumericID, false); - if(localRevs == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - // Walk through the remote history in chronological order, matching each revision ID to - // a local revision. When the list diverges, start creating blank local revisions to fill - // in the local history: - long sequence = 0; - long localParentSequence = 0; - for(int i = revHistory.size() - 1; i >= 0; --i) { - revId = revHistory.get(i); - TDRevision localRev = localRevs.revWithDocIdAndRevId(docId, revId); - if(localRev != null) { - // This revision is known locally. Remember its sequence as the parent of the next one: - sequence = localRev.getSequence(); - assert(sequence > 0); - localParentSequence = sequence; - } - else { - // This revision isn't known, so add it: - TDRevision newRev; - byte[] data = null; - boolean current = false; - if(i == 0) { - // Hey, this is the leaf revision we're inserting: - newRev = rev; - if(!rev.isDeleted()) { - data = encodeDocumentJSON(rev); - if(data == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - } - current = true; - } - else { - // It's an intermediate parent, so insert a stub: - newRev = new TDRevision(docId, revId, false); - } - - // Insert it: - sequence = insertRevision(newRev, docNumericID, sequence, current, data); - - if(sequence <= 0) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - if(i == 0) { - // Write any changed attachments for the new revision: - TDStatus status = processAttachmentsForRevision(rev, localParentSequence); - if(!status.isSuccessful()) { - return status; - } - } - } - } - - // Mark the latest local rev as no longer current: - if(localParentSequence > 0 && localParentSequence != sequence) { - ContentValues args = new ContentValues(); - args.put("current", 0); - String[] whereArgs = { Long.toString(localParentSequence) }; - try { - database.update("revs", args, "sequence=?", whereArgs); - } catch (SQLException e) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - } - - success = true; - } catch(SQLException e) { - endTransaction(success); - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } finally { - endTransaction(success); - } - - // Notify and return: - notifyChange(rev, source); - return new TDStatus(TDStatus.CREATED); - } - - /** VALIDATION **/ - - /** - * Define or clear a named document validation function. 
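On the pull side, a replicator that has fetched a remote revision together with its ?revs=true history would hand it to the forceInsert above roughly like this (docProperties, remoteDocId, remoteRevId, and sourceURL are illustrative):

    // docProperties is the parsed JSON of the remote document, including "_revisions".
    TDRevision pulled = new TDRevision(remoteDocId, remoteRevId, false);
    pulled.setBody(new TDBody(docProperties));

    // Expand {"start": N, "ids": [...]} into the full list of ancestor revision IDs.
    List<String> history = TDDatabase.parseCouchDBRevisionHistory(docProperties);

    TDStatus result = db.forceInsert(pulled, history, sourceURL);
    if (result.getCode() != TDStatus.CREATED) {
        Log.w(TDDatabase.TAG, "forceInsert failed with status " + result.getCode());
    }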
- */ - public void defineValidation(String name, TDValidationBlock validationBlock) { - if(validations == null) { - validations = new HashMap(); - } - validations.put(name, validationBlock); - } - - public TDValidationBlock getValidationNamed(String name) { - TDValidationBlock result = null; - if(validations != null) { - result = validations.get(name); - } - return result; - } - - public TDStatus validateRevision(TDRevision newRev, TDRevision oldRev) { - TDStatus result = new TDStatus(TDStatus.OK); - if(validations == null || validations.size() == 0) { - return result; - } - TDValidationContextImpl context = new TDValidationContextImpl(this, oldRev); - for (String validationName : validations.keySet()) { - TDValidationBlock validation = getValidationNamed(validationName); - if(!validation.validate(newRev, context)) { - result.setCode(context.getErrorType().getCode()); - break; - } - } - return result; - } - - /*************************************************************************************************/ - /*** TDDatabase+Replication ***/ - /*************************************************************************************************/ - - //TODO implement missing replication methods - - public List getActiveReplicators() { - return activeReplicators; - } - - public TDReplicator getActiveReplicator(URL remote, boolean push) { - if(activeReplicators != null) { - for (TDReplicator replicator : activeReplicators) { - if(replicator.getRemote().equals(remote) && replicator.isPush() == push && replicator.isRunning()) { - return replicator; - } - } - } - return null; - } - - public TDReplicator getReplicator(URL remote, boolean push, boolean continuous, ScheduledExecutorService workExecutor) { - TDReplicator replicator = getReplicator(remote, null, push, continuous, workExecutor); - - return replicator; - } - - public TDReplicator getReplicator(URL remote, HttpClientFactory httpClientFactory, boolean push, boolean continuous, ScheduledExecutorService workExecutor) { - TDReplicator result = getActiveReplicator(remote, push); - if(result != null) { - return result; - } - result = push ? new TDPusher(this, remote, continuous, httpClientFactory, workExecutor) : new TDPuller(this, remote, continuous, httpClientFactory, workExecutor); - - if(activeReplicators == null) { - activeReplicators = new ArrayList(); - } - activeReplicators.add(result); - return result; - } - - public String lastSequenceWithRemoteURL(URL url, boolean push) { - Cursor cursor = null; - String result = null; - try { - String[] args = { url.toExternalForm(), Integer.toString(push ? 1 : 0) }; - cursor = database.rawQuery("SELECT last_sequence FROM replicators WHERE remote=? 
AND push=?", args); - if(cursor.moveToFirst()) { - result = cursor.getString(0); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting last sequence", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - return result; - } - - public boolean setLastSequence(String lastSequence, URL url, boolean push) { - ContentValues values = new ContentValues(); - values.put("remote", url.toExternalForm()); - values.put("push", push); - values.put("last_sequence", lastSequence); - long newId = database.insertWithOnConflict("replicators", null, values, SQLiteDatabase.CONFLICT_REPLACE); - return (newId == -1); - } - - public static String quote(String string) { - return string.replace("'", "''"); - } - - public static String joinQuoted(List strings) { - if(strings.size() == 0) { - return ""; - } - - String result = "'"; - boolean first = true; - for (String string : strings) { - if(first) { - first = false; - } - else { - result = result + "','"; - } - result = result + quote(string); - } - result = result + "'"; - - return result; - } - - public boolean findMissingRevisions(TDRevisionList touchRevs) { - if(touchRevs.size() == 0) { - return true; - } - - String quotedDocIds = joinQuoted(touchRevs.getAllDocIds()); - String quotedRevIds = joinQuoted(touchRevs.getAllRevIds()); - - String sql = "SELECT docid, revid FROM revs, docs " + - "WHERE docid IN (" + - quotedDocIds + - ") AND revid in (" + - quotedRevIds + ")" + - " AND revs.doc_id == docs.doc_id"; - - Cursor cursor = null; - try { - cursor = database.rawQuery(sql, null); - cursor.moveToFirst(); - while(!cursor.isAfterLast()) { - TDRevision rev = touchRevs.revWithDocIdAndRevId(cursor.getString(0), cursor.getString(1)); - - if(rev != null) { - touchRevs.remove(rev); - } - - cursor.moveToNext(); - } - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error finding missing revisions", e); - return false; - } finally { - if(cursor != null) { - cursor.close(); - } - } - return true; - } - - /*************************************************************************************************/ - /*** TDDatabase+LocalDocs ***/ - /*************************************************************************************************/ - - public TDRevision getLocalDocument(String docID, String revID) { - TDRevision result = null; - Cursor cursor = null; - try { - String[] args = { docID }; - cursor = database.rawQuery("SELECT revid, json FROM localdocs WHERE docid=?", args); - if(cursor.moveToFirst()) { - String gotRevID = cursor.getString(0); - if(revID != null && (!revID.equals(gotRevID))) { - return null; - } - byte[] json = cursor.getBlob(1); - Map properties = null; - try { - properties = TDServer.getObjectMapper().readValue(json, Map.class); - properties.put("_id", docID); - properties.put("_rev", gotRevID); - result = new TDRevision(docID, gotRevID, false); - result.setProperties(properties); - } catch (Exception e) { - Log.w(TAG, "Error parsing local doc JSON", e); - return null; - } - - } - return result; - } catch (SQLException e) { - Log.e(TDDatabase.TAG, "Error getting local document", e); - return null; - } finally { - if(cursor != null) { - cursor.close(); - } - } - } - - public TDRevision putLocalRevision(TDRevision revision, String prevRevID, TDStatus status) { - String docID = revision.getDocId(); - if(!docID.startsWith("_local/")) { - status.setCode(TDStatus.BAD_REQUEST); - return null; - } - - if(!revision.isDeleted()) { - // PUT: - byte[] json = encodeDocumentJSON(revision); - String newRevID; - 
if(prevRevID != null) { - int generation = TDRevision.generationFromRevID(prevRevID); - if(generation == 0) { - status.setCode(TDStatus.BAD_REQUEST); - return null; - } - newRevID = Integer.toString(++generation) + "-local"; - ContentValues values = new ContentValues(); - values.put("revid", newRevID); - values.put("json", json); - String[] whereArgs = { docID, prevRevID }; - try { - int rowsUpdated = database.update("localdocs", values, "docid=? AND revid=?", whereArgs); - if(rowsUpdated == 0) { - status.setCode(TDStatus.CONFLICT); - return null; - } - } catch (SQLException e) { - status.setCode(TDStatus.INTERNAL_SERVER_ERROR); - return null; - } - } else { - newRevID = "1-local"; - ContentValues values = new ContentValues(); - values.put("docid", docID); - values.put("revid", newRevID); - values.put("json", json); - try { - database.insertWithOnConflict("localdocs", null, values, SQLiteDatabase.CONFLICT_IGNORE); - } catch (SQLException e) { - status.setCode(TDStatus.INTERNAL_SERVER_ERROR); - return null; - } - } - status.setCode(TDStatus.CREATED); - return revision.copyWithDocID(docID, newRevID); - } - else { - // DELETE: - TDStatus deleteStatus = deleteLocalDocument(docID, prevRevID); - status.setCode(deleteStatus.getCode()); - return (status.isSuccessful()) ? revision : null; - } - } - - public TDStatus deleteLocalDocument(String docID, String revID) { - if(docID == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - if(revID == null) { - // Didn't specify a revision to delete: 404 or a 409, depending - return (getLocalDocument(docID, null) != null) ? new TDStatus(TDStatus.CONFLICT) : new TDStatus(TDStatus.NOT_FOUND); - } - String[] whereArgs = { docID, revID }; - try { - int rowsDeleted = database.delete("localdocs", "docid=? AND revid=?", whereArgs); - if(rowsDeleted == 0) { - return (getLocalDocument(docID, null) != null) ? 
new TDStatus(TDStatus.CONFLICT) : new TDStatus(TDStatus.NOT_FOUND); - } - return new TDStatus(TDStatus.OK); - } catch (SQLException e) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - } + private String path; + private String name; + private SQLiteDatabase database; + private boolean open = false; + private int transactionLevel = 0; + public static final String TAG = "TDDatabase"; + + private Map views; + private Map filters; + private Map validations; + private List activeReplicators; + private TDBlobStore attachments; + + /** + * Options for what metadata to include in document bodies + */ + public enum TDContentOptions { + TDIncludeAttachments, TDIncludeConflicts, TDIncludeRevs, TDIncludeRevsInfo, TDIncludeLocalSeq, TDNoBody + } + + private static final Set KNOWN_SPECIAL_KEYS; + + static { + KNOWN_SPECIAL_KEYS = new HashSet(); + KNOWN_SPECIAL_KEYS.add("_id"); + KNOWN_SPECIAL_KEYS.add("_rev"); + KNOWN_SPECIAL_KEYS.add("_attachments"); + KNOWN_SPECIAL_KEYS.add("_deleted"); + KNOWN_SPECIAL_KEYS.add("_revisions"); + KNOWN_SPECIAL_KEYS.add("_revs_info"); + KNOWN_SPECIAL_KEYS.add("_conflicts"); + KNOWN_SPECIAL_KEYS.add("_deleted_conflicts"); + } + + public static final String SCHEMA = "" + + "CREATE TABLE docs ( " + + " doc_id INTEGER PRIMARY KEY, " + + " docid TEXT UNIQUE NOT NULL); " + + " CREATE INDEX docs_docid ON docs(docid); " + + " CREATE TABLE revs ( " + + " sequence INTEGER PRIMARY KEY AUTOINCREMENT, " + + " doc_id INTEGER NOT NULL REFERENCES docs(doc_id) ON DELETE CASCADE, " + + " revid TEXT NOT NULL, " + + " parent INTEGER REFERENCES revs(sequence) ON DELETE SET NULL, " + + " current BOOLEAN, " + + " deleted BOOLEAN DEFAULT 0, " + + " json BLOB); " + + " CREATE INDEX revs_by_id ON revs(revid, doc_id); " + + " CREATE INDEX revs_current ON revs(doc_id, current); " + + " CREATE INDEX revs_parent ON revs(parent); " + + " CREATE TABLE localdocs ( " + + " docid TEXT UNIQUE NOT NULL, " + + " revid TEXT NOT NULL, " + + " json BLOB); " + + " CREATE INDEX localdocs_by_docid ON localdocs(docid); " + + " CREATE TABLE views ( " + + " view_id INTEGER PRIMARY KEY, " + + " name TEXT UNIQUE NOT NULL," + + " version TEXT, " + + " lastsequence INTEGER DEFAULT 0); " + + " CREATE INDEX views_by_name ON views(name); " + + " CREATE TABLE maps ( " + + " view_id INTEGER NOT NULL REFERENCES views(view_id) ON DELETE CASCADE, " + + " sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE, " + + " key TEXT NOT NULL COLLATE JSON, " + + " value TEXT); " + + " CREATE INDEX maps_keys on maps(view_id, key COLLATE JSON); " + + " CREATE TABLE attachments ( " + + " sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE, " + + " filename TEXT NOT NULL, " + + " key BLOB NOT NULL, " + + " type TEXT, " + + " length INTEGER NOT NULL, " + + " revpos INTEGER DEFAULT 0); " + + " CREATE INDEX attachments_by_sequence on attachments(sequence, filename); " + + " CREATE TABLE replicators ( " + + " remote TEXT NOT NULL, " + " push BOOLEAN, " + + " last_sequence TEXT, " + + " UNIQUE (remote, push)); " + + " PRAGMA user_version = 3"; // at the end, update user_version + + /*************************************************************************************************/ + /*** TDDatabase ***/ + /*************************************************************************************************/ + + public String getAttachmentStorePath() { + String attachmentStorePath = path; + int lastDotPosition = attachmentStorePath.lastIndexOf('.'); + if (lastDotPosition > 0) { + attachmentStorePath = 
attachmentStorePath.substring(0, + lastDotPosition); + } + attachmentStorePath = attachmentStorePath + File.separator + + "attachments"; + return attachmentStorePath; + } + + public static TDDatabase createEmptyDBAtPath(String path) { + if (!FileDirUtils.removeItemIfExists(path)) { + return null; + } + TDDatabase result = new TDDatabase(path); + File af = new File(result.getAttachmentStorePath()); + // recursively delete attachments path + if (!FileDirUtils.deleteRecursive(af)) { + return null; + } + if (!result.open()) { + return null; + } + return result; + } + + public TDDatabase(String path) { + assert (path.startsWith("/")); // path must be absolute + this.path = path; + this.name = FileDirUtils.getDatabaseNameFromPath(path); + } + + public String toString() { + return this.getClass().getName() + "[" + path + "]"; + } + + public boolean exists() { + return new File(path).exists(); + } + + /** + * Replaces the database with a copy of another database. + * + * This is primarily used to install a canned database on first launch of an + * app, in which case you should first check .exists to avoid replacing the + * database if it exists already. The canned database would have been copied + * into your app bundle at build time. + * + * @param databasePath + * Path of the database file that should replace this one. + * @param attachmentsPath + * Path of the associated attachments directory, or nil if there + * are no attachments. + * @return true if the database was copied, IOException if an error occurs + **/ + public boolean replaceWithDatabase(String databasePath, + String attachmentsPath) throws IOException { + String dstAttachmentsPath = this.getAttachmentStorePath(); + File sourceFile = new File(databasePath); + File destFile = new File(path); + FileDirUtils.copyFile(sourceFile, destFile); + File attachmentsFile = new File(dstAttachmentsPath); + FileDirUtils.deleteRecursive(attachmentsFile); + attachmentsFile.mkdirs(); + if (attachmentsPath != null) { + FileDirUtils.copyFolder(new File(attachmentsPath), attachmentsFile); + } + return true; + } + + public boolean initialize(String statements) { + try { + for (String statement : statements.split(";")) { + database.execSQL(statement); + } + } catch (SQLException e) { + close(); + return false; + } + return true; + } + + public boolean open() { + if (open) { + return true; + } + + try { + database = SQLiteDatabase.openDatabase(path, null, + SQLiteDatabase.CREATE_IF_NECESSARY); + TDCollateJSON.registerCustomCollators(database); + } catch (SQLiteException e) { + Log.e(TDDatabase.TAG, "Error opening", e); + return false; + } + + // Stuff we need to initialize every time the database opens: + if (!initialize("PRAGMA foreign_keys = ON;")) { + Log.e(TDDatabase.TAG, "Error turning on foreign keys"); + return false; + } + + // Check the user_version number we last stored in the database: + int dbVersion = database.getVersion(); + + // Incompatible version changes increment the hundreds' place: + if (dbVersion >= 100) { + Log.w(TDDatabase.TAG, "TDDatabase: Database version (" + dbVersion + + ") is newer than I know how to work with"); + database.close(); + return false; + } + + if (dbVersion < 1) { + // First-time initialization: + // (Note: Declaring revs.sequence as AUTOINCREMENT means the values + // will always be + // monotonically increasing, never reused. 
See + // ) + if (!initialize(SCHEMA)) { + database.close(); + return false; + } + dbVersion = 3; + } + + if (dbVersion < 2) { + // Version 2: added attachments.revpos + String upgradeSql = "ALTER TABLE attachments ADD COLUMN revpos INTEGER DEFAULT 0; " + + "PRAGMA user_version = 2"; + if (!initialize(upgradeSql)) { + database.close(); + return false; + } + dbVersion = 2; + } + + if (dbVersion < 3) { + String upgradeSql = "CREATE TABLE localdocs ( " + + "docid TEXT UNIQUE NOT NULL, " + "revid TEXT NOT NULL, " + + "json BLOB); " + + "CREATE INDEX localdocs_by_docid ON localdocs(docid); " + + "PRAGMA user_version = 3"; + if (!initialize(upgradeSql)) { + database.close(); + return false; + } + dbVersion = 3; + } + + if (dbVersion < 4) { + String upgradeSql = "CREATE TABLE info ( " + + "key TEXT PRIMARY KEY, " + "value TEXT); " + + "INSERT INTO INFO (key, value) VALUES ('privateUUID', '" + + TDMisc.TDCreateUUID() + "'); " + + "INSERT INTO INFO (key, value) VALUES ('publicUUID', '" + + TDMisc.TDCreateUUID() + "'); " + + "PRAGMA user_version = 4"; + if (!initialize(upgradeSql)) { + database.close(); + return false; + } + } + + if (dbVersion < 5) { + String upgradeSql = "CREATE TABLE replicator_log ( " + + " remote TEXT NOT NULL, " + + " push BOOLEAN, " + + " docid TEXT NOT NULL, " + + " revid TEXT NOT NULL, " + + " deleted BOOLEAN, " + + " sequence INTEGER, " + + " lastUpdated INTEGER, " + + " UNIQUE (remote, push, docid, revid)); " + + "PRAGMA user_version = 5"; + if (!initialize(upgradeSql)) { + database.close(); + return false; + } + } + + try { + attachments = new TDBlobStore(getAttachmentStorePath()); + } catch (IllegalArgumentException e) { + Log.e(TDDatabase.TAG, "Could not initialize attachment store", e); + database.close(); + return false; + } + + open = true; + return true; + } + + public boolean close() { + if (!open) { + return false; + } + + if (views != null) { + for (TDView view : views.values()) { + view.databaseClosing(); + } + } + views = null; + + if (activeReplicators != null) { + for (TDReplicator replicator : activeReplicators) { + replicator.databaseClosing(); + } + activeReplicators = null; + } + + if (database != null && database.isOpen()) { + database.close(); + } + open = false; + transactionLevel = 0; + return true; + } + + public boolean deleteDatabase() { + if (open) { + if (!close()) { + return false; + } + } else if (!exists()) { + return true; + } + File file = new File(path); + File attachmentsFile = new File(getAttachmentStorePath()); + + boolean deleteStatus = file.delete(); + // recursively delete attachments path + boolean deleteAttachmentStatus = FileDirUtils + .deleteRecursive(attachmentsFile); + return deleteStatus && deleteAttachmentStatus; + } + + public String getPath() { + return path; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + // Leave this package protected, so it can only be used + // TDView uses this accessor + SQLiteDatabase getDatabase() { + return database; + } + + public TDBlobStore getAttachments() { + return attachments; + } + + public long totalDataSize() { + File f = new File(path); + long size = f.length() + attachments.totalDataSize(); + return size; + } + + /** + * Begins a database transaction. Transactions can nest. 
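The upgrade ladder above keys every schema change off SQLite's user_version pragma: read the stored version, apply the DDL for each missing step, and bump the pragma in the same statement batch. A minimal generic sketch of that pattern; the version number and column are hypothetical, not a real TouchDB migration:

    int version = database.getVersion();   // reads PRAGMA user_version
    if (version < 6) {
        // Hypothetical step 6: add a column, and record the new version in the
        // same initialize() batch so the DDL and the version bump go together.
        String upgradeSql = "ALTER TABLE attachments ADD COLUMN encoding INTEGER DEFAULT 0; "
                + "PRAGMA user_version = 6";
        if (!initialize(upgradeSql)) {
            database.close();
            return false;
        }
        version = 6;
    }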
Every + * beginTransaction() must be balanced by a later endTransaction() + */ + public boolean beginTransaction() { + try { + database.beginTransaction(); + ++transactionLevel; + // Log.v(TAG, "Begin transaction (level " + + // Integer.toString(transactionLevel) + ")..."); + } catch (SQLException e) { + return false; + } + return true; + } + + /** + * Commits or aborts (rolls back) a transaction. + * + * @param commit + * If true, commits; if false, aborts and rolls back, undoing all + * changes made since the matching -beginTransaction call, + * *including* any committed nested transactions. + */ + public boolean endTransaction(boolean commit) { + assert (transactionLevel > 0); + + if (commit) { + // Log.v(TAG, "Committing transaction (level " + + // Integer.toString(transactionLevel) + ")..."); + database.setTransactionSuccessful(); + database.endTransaction(); + } else { + Log.v(TAG, + "CANCEL transaction (level " + + Integer.toString(transactionLevel) + ")..."); + try { + database.endTransaction(); + } catch (SQLException e) { + return false; + } + } + + --transactionLevel; + return true; + } + + /** + * Compacts the database storage by removing the bodies and attachments of + * obsolete revisions. + */ + public TDStatus compact() { + // Can't delete any rows because that would lose revision tree history. + // But we can remove the JSON of non-current revisions, which is most of + // the space. + try { + Log.v(TDDatabase.TAG, "Deleting JSON of old revisions..."); + ContentValues args = new ContentValues(); + args.put("json", (String) null); + database.update("revs", args, "current=0", null); + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error compacting", e); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + Log.v(TDDatabase.TAG, "Deleting old attachments..."); + TDStatus result = garbageCollectAttachments(); + + Log.v(TDDatabase.TAG, "Vacuuming SQLite database..."); + try { + database.execSQL("VACUUM"); + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error vacuuming database", e); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + return result; + } + + public String privateUUID() { + String result = null; + Cursor cursor = null; + try { + cursor = database.rawQuery( + "SELECT value FROM info WHERE key='privateUUID'", null); + if (cursor.moveToFirst()) { + result = cursor.getString(0); + } + } catch (SQLException e) { + Log.e(TAG, "Error querying privateUUID", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + return result; + } + + public String publicUUID() { + String result = null; + Cursor cursor = null; + try { + cursor = database.rawQuery( + "SELECT value FROM info WHERE key='publicUUID'", null); + if (cursor.moveToFirst()) { + result = cursor.getString(0); + } + } catch (SQLException e) { + Log.e(TAG, "Error querying privateUUID", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + return result; + } + + /** GETTING DOCUMENTS: **/ + + public int getDocumentCount() { + String sql = "SELECT COUNT(DISTINCT doc_id) FROM revs WHERE current=1 AND deleted=0"; + Cursor cursor = null; + int result = 0; + try { + cursor = database.rawQuery(sql, null); + if (cursor.moveToFirst()) { + result = cursor.getInt(0); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting document count", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + public long getLastSequence() { + String sql = "SELECT MAX(sequence) FROM revs"; + Cursor cursor = null; + long result = 0; 
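beginTransaction() and endTransaction(commit) above keep a simple nesting counter on top of SQLiteDatabase's transaction API; every begin must be balanced by an end that carries the overall success flag. A hedged usage sketch:

    boolean success = false;
    db.beginTransaction();
    try {
        // ... a batch of putRevision / attachment calls through this TDDatabase ...
        success = true;
    } finally {
        // Commits on success; on failure rolls back everything since the matching begin.
        db.endTransaction(success);
    }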
+ try { + cursor = database.rawQuery(sql, null); + if (cursor.moveToFirst()) { + result = cursor.getLong(0); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting last sequence", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + return result; + } + + /** + * Splices the contents of an NSDictionary into JSON data (that already + * represents a dict), without parsing the JSON. + */ + public byte[] appendDictToJSON(byte[] json, Map dict) { + if (dict.size() == 0) { + return json; + } + + byte[] extraJSON = null; + try { + extraJSON = TDServer.getObjectMapper().writeValueAsBytes(dict); + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error convert extra JSON to bytes", e); + return null; + } + + int jsonLength = json.length; + int extraLength = extraJSON.length; + if (jsonLength == 2) { // Original JSON was empty + return extraJSON; + } + byte[] newJson = new byte[jsonLength + extraLength - 1]; + System.arraycopy(json, 0, newJson, 0, jsonLength - 1); // Copy json w/o + // trailing '}' + newJson[jsonLength - 1] = ','; // Add a ',' + System.arraycopy(extraJSON, 1, newJson, jsonLength, extraLength - 1); + return newJson; + } + + /** + * Inserts the _id, _rev and _attachments properties into the JSON data and + * stores it in rev. Rev must already have its revID and sequence properties + * set. + */ + public Map extraPropertiesForRevision(TDRevision rev, + EnumSet contentOptions) { + + String docId = rev.getDocId(); + String revId = rev.getRevId(); + long sequenceNumber = rev.getSequence(); + assert (revId != null); + assert (sequenceNumber > 0); + + // Get attachment metadata, and optionally the contents: + boolean withAttachments = contentOptions + .contains(TDContentOptions.TDIncludeAttachments); + Map attachmentsDict = getAttachmentsDictForSequenceWithContent( + sequenceNumber, withAttachments); + + // Get more optional stuff to put in the properties: + // OPT: This probably ends up making redundant SQL queries if multiple + // options are enabled. 
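appendDictToJSON above splices extra top-level keys into already-serialized JSON by byte surgery instead of re-parsing: it drops the trailing '}', appends a ',', then copies the extra object minus its leading '{'. A hedged worked example (key order in the output depends on the serializer):

    Map<String, Object> extra = new HashMap<String, Object>();
    extra.put("_id", "doc1");
    extra.put("_rev", "1-abc");

    byte[] original = "{\"type\":\"note\"}".getBytes();
    byte[] spliced = db.appendDictToJSON(original, extra);
    // spliced now holds the bytes of {"type":"note","_id":"doc1","_rev":"1-abc"}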
+ Long localSeq = null; + if (contentOptions.contains(TDContentOptions.TDIncludeLocalSeq)) { + localSeq = sequenceNumber; + } + + Map revHistory = null; + if (contentOptions.contains(TDContentOptions.TDIncludeRevs)) { + revHistory = getRevisionHistoryDict(rev); + } + + List revsInfo = null; + if (contentOptions.contains(TDContentOptions.TDIncludeRevsInfo)) { + revsInfo = new ArrayList(); + List revHistoryFull = getRevisionHistory(rev); + for (TDRevision historicalRev : revHistoryFull) { + Map revHistoryItem = new HashMap(); + String status = "available"; + if (historicalRev.isDeleted()) { + status = "deleted"; + } + // TODO: Detect missing revisions, set status="missing" + revHistoryItem.put("rev", historicalRev.getRevId()); + revHistoryItem.put("status", status); + revsInfo.add(revHistoryItem); + } + } + + List conflicts = null; + if (contentOptions.contains(TDContentOptions.TDIncludeConflicts)) { + TDRevisionList revs = getAllRevisionsOfDocumentID(docId, true); + if (revs.size() > 1) { + conflicts = new ArrayList(); + for (TDRevision historicalRev : revs) { + if (!historicalRev.equals(rev)) { + conflicts.add(historicalRev.getRevId()); + } + } + } + } + + Map result = new HashMap(); + result.put("_id", docId); + result.put("_rev", revId); + if (rev.isDeleted()) { + result.put("_deleted", true); + } + if (attachmentsDict != null) { + result.put("_attachments", attachmentsDict); + } + if (localSeq != null) { + result.put("_local_seq", localSeq); + } + if (revHistory != null) { + result.put("_revisions", revHistory); + } + if (revsInfo != null) { + result.put("_revs_info", revsInfo); + } + if (conflicts != null) { + result.put("_conflicts", conflicts); + } + + return result; + } + + /** + * Inserts the _id, _rev and _attachments properties into the JSON data and + * stores it in rev. Rev must already have its revID and sequence properties + * set. + */ + public void expandStoredJSONIntoRevisionWithAttachments(byte[] json, + TDRevision rev, EnumSet contentOptions) { + Map extra = extraPropertiesForRevision(rev, + contentOptions); + if (json != null) { + rev.setJson(appendDictToJSON(json, extra)); + } else { + rev.setProperties(extra); + } + } + + @SuppressWarnings("unchecked") + public Map documentPropertiesFromJSON(byte[] json, + String docId, String revId, long sequence, + EnumSet contentOptions) { + + TDRevision rev = new TDRevision(docId, revId, false); + rev.setSequence(sequence); + Map extra = extraPropertiesForRevision(rev, + contentOptions); + if (json == null) { + return extra; + } + + Map docProperties = null; + try { + docProperties = TDServer.getObjectMapper().readValue(json, + Map.class); + docProperties.putAll(extra); + return docProperties; + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error serializing properties to JSON", e); + } + + return docProperties; + } + + public TDRevision getDocumentWithIDAndRev(String id, String rev, + EnumSet contentOptions) { + TDRevision result = null; + String sql; + + Cursor cursor = null; + try { + cursor = null; + String cols = "revid, deleted, sequence"; + if (!contentOptions.contains(TDContentOptions.TDNoBody)) { + cols += ", json"; + } + if (rev != null) { + sql = "SELECT " + + cols + + " FROM revs, docs WHERE docs.docid=? AND revs.doc_id=docs.doc_id AND revid=? LIMIT 1"; + String[] args = { id, rev }; + cursor = database.rawQuery(sql, args); + } else { + sql = "SELECT " + + cols + + " FROM revs, docs WHERE docs.docid=? 
AND revs.doc_id=docs.doc_id and current=1 and deleted=0 ORDER BY revid DESC LIMIT 1"; + String[] args = { id }; + cursor = database.rawQuery(sql, args); + } + + if (cursor.moveToFirst()) { + if (rev == null) { + rev = cursor.getString(0); + } + boolean deleted = (cursor.getInt(1) > 0); + result = new TDRevision(id, rev, deleted); + result.setSequence(cursor.getLong(2)); + if (!contentOptions.equals(EnumSet + .of(TDContentOptions.TDNoBody))) { + byte[] json = null; + if (!contentOptions.contains(TDContentOptions.TDNoBody)) { + json = cursor.getBlob(3); + } + expandStoredJSONIntoRevisionWithAttachments(json, result, + contentOptions); + } + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting document with id and rev", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + return result; + } + + public boolean existsDocumentWithIDAndRev(String docId, String revId) { + return getDocumentWithIDAndRev(docId, revId, + EnumSet.of(TDContentOptions.TDNoBody)) != null; + } + + public TDStatus loadRevisionBody(TDRevision rev, + EnumSet contentOptions) { + if (rev.getBody() != null) { + return new TDStatus(TDStatus.OK); + } + assert ((rev.getDocId() != null) && (rev.getRevId() != null)); + + Cursor cursor = null; + TDStatus result = new TDStatus(TDStatus.NOT_FOUND); + try { + String sql = "SELECT sequence, json FROM revs, docs WHERE revid=? AND docs.docid=? AND revs.doc_id=docs.doc_id LIMIT 1"; + String[] args = { rev.getRevId(), rev.getDocId() }; + cursor = database.rawQuery(sql, args); + if (cursor.moveToFirst()) { + result.setCode(TDStatus.OK); + rev.setSequence(cursor.getLong(0)); + expandStoredJSONIntoRevisionWithAttachments(cursor.getBlob(1), + rev, contentOptions); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error loading revision body", e); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } finally { + if (cursor != null) { + cursor.close(); + } + } + return result; + } + + public long getDocNumericID(String docId) { + Cursor cursor = null; + String[] args = { docId }; + + long result = -1; + try { + cursor = database.rawQuery("SELECT doc_id FROM docs WHERE docid=?", + args); + + if (cursor.moveToFirst()) { + result = cursor.getLong(0); + } else { + result = 0; + } + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error getting doc numeric id", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + /** HISTORY: **/ + + /** + * Returns all the known revisions (or all current/conflicting revisions) of + * a document. + */ + public TDRevisionList getAllRevisionsOfDocumentID(String docId, + long docNumericID, boolean onlyCurrent) { + + String sql = null; + if (onlyCurrent) { + sql = "SELECT sequence, revid, deleted FROM revs " + + "WHERE doc_id=? AND current ORDER BY sequence DESC"; + } else { + sql = "SELECT sequence, revid, deleted FROM revs " + + "WHERE doc_id=? 
ORDER BY sequence DESC"; + } + + String[] args = { Long.toString(docNumericID) }; + Cursor cursor = null; + + cursor = database.rawQuery(sql, args); + + TDRevisionList result; + try { + cursor.moveToFirst(); + result = new TDRevisionList(); + while (!cursor.isAfterLast()) { + TDRevision rev = new TDRevision(docId, cursor.getString(1), + (cursor.getInt(2) > 0)); + rev.setSequence(cursor.getLong(0)); + result.add(rev); + cursor.moveToNext(); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting all revisions of document", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + public TDRevisionList getAllRevisionsOfDocumentID(String docId, + boolean onlyCurrent) { + long docNumericId = getDocNumericID(docId); + if (docNumericId < 0) { + return null; + } else if (docNumericId == 0) { + return new TDRevisionList(); + } else { + return getAllRevisionsOfDocumentID(docId, docNumericId, onlyCurrent); + } + } + + public List getConflictingRevisionIDsOfDocID(String docID) { + long docIdNumeric = getDocNumericID(docID); + if (docIdNumeric < 0) { + return null; + } + + List result = new ArrayList(); + Cursor cursor = null; + try { + String[] args = { Long.toString(docIdNumeric) }; + cursor = database.rawQuery( + "SELECT revid FROM revs WHERE doc_id=? AND current " + + "ORDER BY revid DESC OFFSET 1", args); + cursor.moveToFirst(); + while (!cursor.isAfterLast()) { + result.add(cursor.getString(0)); + cursor.moveToNext(); + } + + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting all revisions of document", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + public String findCommonAncestorOf(TDRevision rev, List revIDs) { + String result = null; + + if (revIDs.size() == 0) + return null; + String docId = rev.getDocId(); + long docNumericID = getDocNumericID(docId); + if (docNumericID <= 0) + return null; + String quotedRevIds = joinQuoted(revIDs); + String sql = "SELECT revid FROM revs " + + "WHERE doc_id=? and revid in (" + quotedRevIds + + ") and revid <= ? " + "ORDER BY revid DESC LIMIT 1"; + String[] args = { Long.toString(docNumericID) }; + + Cursor cursor = null; + try { + cursor = database.rawQuery(sql, args); + cursor.moveToFirst(); + if (!cursor.isAfterLast()) { + result = cursor.getString(0); + } + + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting all revisions of document", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + /** + * Returns an array of TDRevs in reverse chronological order, starting with + * the given revision. + */ + public List getRevisionHistory(TDRevision rev) { + String docId = rev.getDocId(); + String revId = rev.getRevId(); + assert ((docId != null) && (revId != null)); + + long docNumericId = getDocNumericID(docId); + if (docNumericId < 0) { + return null; + } else if (docNumericId == 0) { + return new ArrayList(); + } + + String sql = "SELECT sequence, parent, revid, deleted FROM revs " + + "WHERE doc_id=? 
ORDER BY sequence DESC"; + String[] args = { Long.toString(docNumericId) }; + Cursor cursor = null; + + List result; + try { + cursor = database.rawQuery(sql, args); + + cursor.moveToFirst(); + long lastSequence = 0; + result = new ArrayList(); + while (!cursor.isAfterLast()) { + long sequence = cursor.getLong(0); + boolean matches = false; + if (lastSequence == 0) { + matches = revId.equals(cursor.getString(2)); + } else { + matches = (sequence == lastSequence); + } + if (matches) { + revId = cursor.getString(2); + boolean deleted = (cursor.getInt(3) > 0); + TDRevision aRev = new TDRevision(docId, revId, deleted); + aRev.setSequence(cursor.getLong(0)); + result.add(aRev); + lastSequence = cursor.getLong(1); + if (lastSequence == 0) { + break; + } + } + cursor.moveToNext(); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting revision history", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + // Splits a revision ID into its generation number and opaque suffix string + public static int parseRevIDNumber(String rev) { + int result = -1; + int dashPos = rev.indexOf("-"); + if (dashPos >= 0) { + try { + result = Integer.parseInt(rev.substring(0, dashPos)); + } catch (NumberFormatException e) { + // ignore, let it return -1 + } + } + return result; + } + + // Splits a revision ID into its generation number and opaque suffix string + public static String parseRevIDSuffix(String rev) { + String result = null; + int dashPos = rev.indexOf("-"); + if (dashPos >= 0) { + result = rev.substring(dashPos + 1); + } + return result; + } + + public static Map makeRevisionHistoryDict( + List history) { + if (history == null) { + return null; + } + + // Try to extract descending numeric prefixes: + List suffixes = new ArrayList(); + int start = -1; + int lastRevNo = -1; + for (TDRevision rev : history) { + int revNo = parseRevIDNumber(rev.getRevId()); + String suffix = parseRevIDSuffix(rev.getRevId()); + if (revNo > 0 && suffix.length() > 0) { + if (start < 0) { + start = revNo; + } else if (revNo != lastRevNo - 1) { + start = -1; + break; + } + lastRevNo = revNo; + suffixes.add(suffix); + } else { + start = -1; + break; + } + } + + Map result = new HashMap(); + if (start == -1) { + // we failed to build sequence, just stuff all the revs in list + suffixes = new ArrayList(); + for (TDRevision rev : history) { + suffixes.add(rev.getRevId()); + } + } else { + result.put("start", start); + } + result.put("ids", suffixes); + + return result; + } + + /** + * Returns the revision history as a _revisions dictionary, as returned by + * the REST API's ?revs=true option. + */ + public Map getRevisionHistoryDict(TDRevision rev) { + return makeRevisionHistoryDict(getRevisionHistory(rev)); + } + + public TDRevisionList changesSince(long lastSeq, TDChangesOptions options, + TDFilterBlock filter) { + // http://wiki.apache.org/couchdb/HTTP_database_API#Changes + if (options == null) { + options = new TDChangesOptions(); + } + + boolean includeDocs = options.isIncludeDocs() || (filter != null); + String additionalSelectColumns = ""; + if (includeDocs) { + additionalSelectColumns = ", json"; + } + + String sql = "SELECT sequence, revs.doc_id, docid, revid, deleted" + + additionalSelectColumns + " FROM revs, docs " + + "WHERE sequence > ? 
AND current=1 " + + "AND revs.doc_id = docs.doc_id " + + "ORDER BY revs.doc_id, revid DESC"; + String[] args = { Long.toString(lastSeq) }; + Cursor cursor = null; + TDRevisionList changes = null; + + try { + cursor = database.rawQuery(sql, args); + cursor.moveToFirst(); + changes = new TDRevisionList(); + long lastDocId = 0; + while (!cursor.isAfterLast()) { + if (!options.isIncludeConflicts()) { + // Only count the first rev for a given doc (the rest will + // be losing conflicts): + long docNumericId = cursor.getLong(1); + if (docNumericId == lastDocId) { + cursor.moveToNext(); + continue; + } + lastDocId = docNumericId; + } + + TDRevision rev = new TDRevision(cursor.getString(2), + cursor.getString(3), (cursor.getInt(4) > 0)); + rev.setSequence(cursor.getLong(0)); + if (includeDocs) { + expandStoredJSONIntoRevisionWithAttachments( + cursor.getBlob(5), rev, options.getContentOptions()); + } + if ((filter == null) || (filter.filter(rev))) { + changes.add(rev); + } + cursor.moveToNext(); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error looking for changes", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + + if (options.isSortBySequence()) { + changes.sortBySequence(); + } + changes.limit(options.getLimit()); + return changes; + } + + /** + * Define or clear a named filter function. + * + * These aren't used directly by TDDatabase, but they're looked up by + * TDRouter when a _changes request has a ?filter parameter. + */ + public void defineFilter(String filterName, TDFilterBlock filter) { + if (filters == null) { + filters = new HashMap(); + } + filters.put(filterName, filter); + } + + public TDFilterBlock getFilterNamed(String filterName) { + TDFilterBlock result = null; + if (filters != null) { + result = filters.get(filterName); + } + return result; + } + + /** VIEWS: **/ + + public TDView registerView(TDView view) { + if (view == null) { + return null; + } + if (views == null) { + views = new HashMap(); + } + views.put(view.getName(), view); + return view; + } + + public TDView getViewNamed(String name) { + TDView view = null; + if (views != null) { + view = views.get(name); + } + if (view != null) { + return view; + } + return registerView(new TDView(this, name)); + } + + public TDView getExistingViewNamed(String name) { + TDView view = null; + if (views != null) { + view = views.get(name); + } + if (view != null) { + return view; + } + view = new TDView(this, name); + if (view.getViewId() == 0) { + return null; + } + + return registerView(view); + } + + public List getAllViews() { + Cursor cursor = null; + List result = null; + + try { + cursor = database.rawQuery("SELECT name FROM views", null); + cursor.moveToFirst(); + result = new ArrayList(); + while (!cursor.isAfterLast()) { + result.add(getViewNamed(cursor.getString(0))); + cursor.moveToNext(); + } + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error getting all views", e); + } finally { + if (cursor != null) { + cursor.close(); + } + } + + return result; + } + + public TDStatus deleteViewNamed(String name) { + TDStatus result = new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + try { + String[] whereArgs = { name }; + int rowsAffected = database.delete("views", "name=?", whereArgs); + if (rowsAffected > 0) { + result.setCode(TDStatus.OK); + } else { + result.setCode(TDStatus.NOT_FOUND); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error deleting view", e); + } + return result; + } + + // FIX: This has a lot of code in common with -[TDView + // queryWithOptions:status:]. 
Unify the two! + public Map getDocsWithIDs(List docIDs, + TDQueryOptions options) { + if (options == null) { + options = new TDQueryOptions(); + } + + long updateSeq = 0; + if (options.isUpdateSeq()) { + updateSeq = getLastSequence(); // TODO: needs to be atomic with the + // following SELECT + } + + // Generate the SELECT statement, based on the options: + String additionalCols = ""; + if (options.isIncludeDocs()) { + additionalCols = ", json, sequence"; + } + String sql = "SELECT revs.doc_id, docid, revid, deleted" + + additionalCols + " FROM revs, docs WHERE"; + + if (docIDs != null) { + sql += " docid IN (" + joinQuoted(docIDs) + ")"; + } else { + sql += " deleted=0"; + } + + sql += " AND current=1 AND docs.doc_id = revs.doc_id"; + + List argsList = new ArrayList(); + Object minKey = options.getStartKey(); + Object maxKey = options.getEndKey(); + boolean inclusiveMin = true; + boolean inclusiveMax = options.isInclusiveEnd(); + if (options.isDescending()) { + minKey = maxKey; + maxKey = options.getStartKey(); + inclusiveMin = inclusiveMax; + inclusiveMax = true; + } + + if (minKey != null) { + assert (minKey instanceof String); + if (inclusiveMin) { + sql += " AND docid >= ?"; + } else { + sql += " AND docid > ?"; + } + argsList.add((String) minKey); + } + + if (maxKey != null) { + assert (maxKey instanceof String); + if (inclusiveMax) { + sql += " AND docid <= ?"; + } else { + sql += " AND docid < ?"; + } + argsList.add((String) maxKey); + } + + String order = "ASC"; + if (options.isDescending()) { + order = "DESC"; + } + + sql += " ORDER BY docid " + order + ", revid DESC LIMIT ? OFFSET ?"; + + argsList.add(Integer.toString(options.getLimit())); + argsList.add(Integer.toString(options.getSkip())); + Cursor cursor = null; + long lastDocID = 0; + List> rows = null; + + try { + cursor = database.rawQuery(sql, + argsList.toArray(new String[argsList.size()])); + + cursor.moveToFirst(); + rows = new ArrayList>(); + while (!cursor.isAfterLast()) { + long docNumericID = cursor.getLong(0); + if (docNumericID == lastDocID) { + cursor.moveToNext(); + continue; + } + lastDocID = docNumericID; + + String docId = cursor.getString(1); + String revId = cursor.getString(2); + Map docContents = null; + boolean deleted = cursor.getInt(3) > 0; + if (options.isIncludeDocs() && !deleted) { + byte[] json = cursor.getBlob(4); + long sequence = cursor.getLong(5); + docContents = documentPropertiesFromJSON(json, docId, + revId, sequence, options.getContentOptions()); + } + + Map valueMap = new HashMap(); + valueMap.put("rev", revId); + + Map change = new HashMap(); + change.put("id", docId); + change.put("key", docId); + change.put("value", valueMap); + if (docContents != null) { + change.put("doc", docContents); + } + if (deleted) { + change.put("deleted", true); + } + + rows.add(change); + + cursor.moveToNext(); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting all docs", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + + int totalRows = cursor.getCount(); // ??? Is this true, or does it + // ignore limit/offset? 
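getDocsWithIDs above (and the getAllDocs wrapper just below) return a CouchDB-style _all_docs payload with "rows", "total_rows", and "offset". A hedged sketch of querying a key range; the TDQueryOptions setter names are assumed from the getters used above:

    TDQueryOptions options = new TDQueryOptions();
    options.setStartKey("user:");          // assumed setters matching getStartKey()/isIncludeDocs() etc.
    options.setEndKey("user:zzzz");
    options.setIncludeDocs(true);
    options.setLimit(50);

    Map<String, Object> page = db.getAllDocs(options);
    List<Map<String, Object>> rows = (List<Map<String, Object>>) page.get("rows");
    for (Map<String, Object> row : rows) {
        String docId = (String) row.get("id");
        Map<String, Object> doc = (Map<String, Object>) row.get("doc");   // null unless includeDocs and not deleted
    }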
+ Map result = new HashMap(); + result.put("rows", rows); + result.put("total_rows", totalRows); + result.put("offset", options.getSkip()); + if (updateSeq != 0) { + result.put("update_seq", updateSeq); + } + + return result; + } + + public Map getAllDocs(TDQueryOptions options) { + return getDocsWithIDs(null, options); + } + + /*************************************************************************************************/ + /*** TDDatabase+Attachments ***/ + /*************************************************************************************************/ + + public TDStatus insertAttachmentForSequenceWithNameAndType( + InputStream contentStream, long sequence, String name, + String contentType, int revpos) { + assert (sequence > 0); + assert (name != null); + + TDBlobKey key = new TDBlobKey(); + if (!attachments.storeBlobStream(contentStream, key)) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + byte[] keyData = key.getBytes(); + try { + ContentValues args = new ContentValues(); + args.put("sequence", sequence); + args.put("filename", name); + args.put("key", keyData); + args.put("type", contentType); + args.put("length", attachments.getSizeOfBlob(key)); + args.put("revpos", revpos); + database.insert("attachments", null, args); + return new TDStatus(TDStatus.CREATED); + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error inserting attachment", e); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + } + + public TDStatus copyAttachmentNamedFromSequenceToSequence(String name, + long fromSeq, long toSeq) { + assert (name != null); + assert (toSeq > 0); + if (fromSeq < 0) { + return new TDStatus(TDStatus.NOT_FOUND); + } + + Cursor cursor = null; + + String[] args = { Long.toString(toSeq), name, Long.toString(fromSeq), + name }; + try { + database.execSQL( + "INSERT INTO attachments (sequence, filename, key, type, length, revpos) " + + "SELECT ?, ?, key, type, length, revpos FROM attachments " + + "WHERE sequence=? AND filename=?", args); + cursor = database.rawQuery("SELECT changes()", null); + cursor.moveToFirst(); + int rowsUpdated = cursor.getInt(0); + if (rowsUpdated == 0) { + // Oops. This means a glitch in our attachment-management or + // pull code, + // or else a bug in the upstream server. + Log.w(TDDatabase.TAG, "Can't find inherited attachment " + name + + " from seq# " + Long.toString(fromSeq) + + " to copy to " + Long.toString(toSeq)); + return new TDStatus(TDStatus.NOT_FOUND); + } else { + return new TDStatus(TDStatus.OK); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error copying attachment", e); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } finally { + if (cursor != null) { + cursor.close(); + } + } + } + + /** + * Returns the content and MIME type of an attachment + */ + public TDAttachment getAttachmentForSequence(long sequence, + String filename, TDStatus status) { + assert (sequence > 0); + assert (filename != null); + + Cursor cursor = null; + + String[] args = { Long.toString(sequence), filename }; + try { + cursor = database + .rawQuery( + "SELECT key, type FROM attachments WHERE sequence=? AND filename=?", + args); + + if (!cursor.moveToFirst()) { + status.setCode(TDStatus.NOT_FOUND); + return null; + } + + byte[] keyData = cursor.getBlob(0); + // TODO add checks on key here? 
(ios version) + TDBlobKey key = new TDBlobKey(keyData); + InputStream contentStream = attachments.blobStreamForKey(key); + if (contentStream == null) { + Log.e(TDDatabase.TAG, "Failed to load attachment"); + status.setCode(TDStatus.INTERNAL_SERVER_ERROR); + return null; + } else { + status.setCode(TDStatus.OK); + TDAttachment result = new TDAttachment(); + result.setContentStream(contentStream); + result.setContentType(cursor.getString(1)); + return result; + } + + } catch (SQLException e) { + status.setCode(TDStatus.INTERNAL_SERVER_ERROR); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + + } + + /** + * Constructs an "_attachments" dictionary for a revision, to be inserted in + * its JSON body. + */ + public Map getAttachmentsDictForSequenceWithContent( + long sequence, boolean withContent) { + assert (sequence > 0); + + Cursor cursor = null; + + String args[] = { Long.toString(sequence) }; + try { + cursor = database + .rawQuery( + "SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", + args); + + if (!cursor.moveToFirst()) { + return null; + } + + Map result = new HashMap(); + + while (!cursor.isAfterLast()) { + + byte[] keyData = cursor.getBlob(1); + TDBlobKey key = new TDBlobKey(keyData); + String digestString = "sha1-" + Base64.encodeBytes(keyData); + String dataBase64 = null; + if (withContent) { + byte[] data = attachments.blobForKey(key); + if (data != null) { + dataBase64 = Base64.encodeBytes(data); + } else { + Log.w(TDDatabase.TAG, "Error loading attachment"); + } + } + + Map attachment = new HashMap(); + if (dataBase64 == null) { + attachment.put("stub", true); + } else { + attachment.put("data", dataBase64); + } + attachment.put("digest", digestString); + attachment.put("content_type", cursor.getString(2)); + attachment.put("length", cursor.getInt(3)); + attachment.put("revpos", cursor.getInt(4)); + + result.put(cursor.getString(0), attachment); + + cursor.moveToNext(); + } + + return result; + + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting attachments for sequence", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + } + + /** + * Modifies a TDRevision's body by changing all attachments with revpos < + * minRevPos into stubs. + * + * @param rev + * @param minRevPos + */ + public void stubOutAttachmentsIn(TDRevision rev, int minRevPos) { + if (minRevPos <= 1) { + return; + } + Map properties = (Map) rev + .getProperties(); + Map attachments = null; + if (properties != null) { + attachments = (Map) properties.get("_attachments"); + } + Map editedProperties = null; + Map editedAttachments = null; + for (String name : attachments.keySet()) { + Map attachment = (Map) attachments + .get(name); + int revPos = (Integer) attachment.get("revpos"); + Object stub = attachment.get("stub"); + if (revPos > 0 && revPos < minRevPos && (stub == null)) { + // Strip this attachment's body. 
First make its dictionary + // mutable: + if (editedProperties == null) { + editedProperties = new HashMap(properties); + editedAttachments = new HashMap(attachments); + editedProperties.put("_attachments", editedAttachments); + } + // ...then remove the 'data' and 'follows' key: + Map editedAttachment = new HashMap( + attachment); + editedAttachment.remove("data"); + editedAttachment.remove("follows"); + editedAttachment.put("stub", true); + editedAttachments.put(name, editedAttachment); + Log.d(TDDatabase.TAG, "Stubbed out attachment" + rev + " " + + name + ": revpos" + revPos + " " + minRevPos); + } + } + if (editedProperties != null) + rev.setProperties(editedProperties); + } + + /** + * Given a newly-added revision, adds the necessary attachment rows to the + * database and stores inline attachments into the blob store. + */ + public TDStatus processAttachmentsForRevision(TDRevision rev, + long parentSequence) { + assert (rev != null); + long newSequence = rev.getSequence(); + assert (newSequence > parentSequence); + + // If there are no attachments in the new rev, there's nothing to do: + Map newAttachments = null; + Map properties = (Map) rev + .getProperties(); + if (properties != null) { + newAttachments = (Map) properties + .get("_attachments"); + } + if (newAttachments == null || newAttachments.size() == 0 + || rev.isDeleted()) { + return new TDStatus(TDStatus.OK); + } + + for (String name : newAttachments.keySet()) { + + TDStatus status = new TDStatus(); + Map newAttach = (Map) newAttachments + .get(name); + String newContentBase64 = (String) newAttach.get("data"); + if (newContentBase64 != null) { + // New item contains data, so insert it. First decode the data: + byte[] newContents; + try { + newContents = Base64.decode(newContentBase64); + } catch (IOException e) { + Log.e(TDDatabase.TAG, "IOExeption parsing base64", e); + return new TDStatus(TDStatus.BAD_REQUEST); + } + if (newContents == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + // Now determine the revpos, i.e. generation # this was added + // in. Usually this is + // implicit, but a rev being pulled in replication will have it + // set already. + int generation = rev.getGeneration(); + assert (generation > 0); + Object revposObj = newAttach.get("revpos"); + int revpos = generation; + if (revposObj != null && revposObj instanceof Integer) { + revpos = ((Integer) revposObj).intValue(); + } + + if (revpos > generation) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + // Finally insert the attachment: + status = insertAttachmentForSequenceWithNameAndType( + new ByteArrayInputStream(newContents), newSequence, + name, (String) newAttach.get("content_type"), revpos); + } else { + // It's just a stub, so copy the previous revision's attachment + // entry: + // ? Should I enforce that the type and digest (if any) match? + status = copyAttachmentNamedFromSequenceToSequence(name, + parentSequence, newSequence); + } + if (!status.isSuccessful()) { + return status; + } + } + + return new TDStatus(TDStatus.OK); + } + + /** + * Updates or deletes an attachment, creating a new document revision in the + * process. Used by the PUT / DELETE methods called on attachment URLs. 
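processAttachmentsForRevision above accepts either inline base64 "data" entries, which it decodes and writes to the blob store, or "stub" entries that inherit the parent revision's attachment row. A hedged sketch of what an inline attachment looks like on a revision's properties before it is put; names and content are illustrative:

    Map<String, Object> attachment = new HashMap<String, Object>();
    attachment.put("content_type", "text/plain");
    attachment.put("data", Base64.encodeBytes("hello".getBytes()));   // inline body

    Map<String, Object> attachments = new HashMap<String, Object>();
    attachments.put("greeting.txt", attachment);

    Map<String, Object> props = new HashMap<String, Object>(rev.getProperties());
    props.put("_attachments", attachments);
    rev.setProperties(props);
    // putRevision(...) then routes this through processAttachmentsForRevision.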
+ */ + public TDRevision updateAttachment(String filename, + InputStream contentStream, String contentType, String docID, + String oldRevID, TDStatus status) { + status.setCode(TDStatus.BAD_REQUEST); + if (filename == null || filename.length() == 0 + || (contentStream != null && contentType == null) + || (oldRevID != null && docID == null) + || (contentStream != null && docID == null)) { + return null; + } + + beginTransaction(); + try { + TDRevision oldRev = new TDRevision(docID, oldRevID, false); + if (oldRevID != null) { + // Load existing revision if this is a replacement: + TDStatus loadStatus = loadRevisionBody(oldRev, + EnumSet.noneOf(TDContentOptions.class)); + status.setCode(loadStatus.getCode()); + if (!status.isSuccessful()) { + if (status.getCode() == TDStatus.NOT_FOUND + && existsDocumentWithIDAndRev(docID, null)) { + status.setCode(TDStatus.CONFLICT); // if some other + // revision exists, + // it's a conflict + } + return null; + } + + Map attachments = (Map) oldRev + .getProperties().get("_attachments"); + if (contentStream == null && attachments != null + && !attachments.containsKey(filename)) { + status.setCode(TDStatus.NOT_FOUND); + return null; + } + // Remove the _attachments stubs so putRevision: doesn't copy + // the rows for me + // OPT: Would be better if I could tell loadRevisionBody: not to + // add it + if (attachments != null) { + Map properties = new HashMap( + oldRev.getProperties()); + properties.remove("_attachments"); + oldRev.setBody(new TDBody(properties)); + } + } else { + // If this creates a new doc, it needs a body: + oldRev.setBody(new TDBody(new HashMap())); + } + + // Create a new revision: + TDRevision newRev = putRevision(oldRev, oldRevID, false, status); + if (newRev == null) { + return null; + } + + if (oldRevID != null) { + // Copy all attachment rows _except_ for the one being updated: + String[] args = { Long.toString(newRev.getSequence()), + Long.toString(oldRev.getSequence()), filename }; + database.execSQL( + "INSERT INTO attachments " + + "(sequence, filename, key, type, length, revpos) " + + "SELECT ?, filename, key, type, length, revpos FROM attachments " + + "WHERE sequence=? AND filename != ?", args); + } + + if (contentStream != null) { + // If not deleting, add a new attachment entry: + TDStatus insertStatus = insertAttachmentForSequenceWithNameAndType( + contentStream, newRev.getSequence(), filename, + contentType, newRev.getGeneration()); + status.setCode(insertStatus.getCode()); + + if (!status.isSuccessful()) { + return null; + } + } + + status.setCode((contentStream != null) ? TDStatus.CREATED + : TDStatus.OK); + return newRev; + + } catch (SQLException e) { + Log.e(TAG, "Error uploading attachment", e); + status.setCode(TDStatus.INTERNAL_SERVER_ERROR); + return null; + } finally { + endTransaction(status.isSuccessful()); + } + } + + /** + * Deletes obsolete attachments from the database and blob store. 
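 *
 * Attachment rows whose revisions no longer have a JSON body are removed
 * first; any blobs not referenced by a remaining row are then deleted from
 * the blob store. Example (illustrative):
 *
 *   TDStatus st = db.garbageCollectAttachments();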
+ */ + public TDStatus garbageCollectAttachments() { + // First delete attachment rows for already-cleared revisions: + // OPT: Could start after last sequence# we GC'd up to + + try { + database.execSQL("DELETE FROM attachments WHERE sequence IN " + + "(SELECT sequence from revs WHERE json IS null)"); + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error deleting attachments", e); + } + + // Now collect all remaining attachment IDs and tell the store to delete + // all but these: + Cursor cursor = null; + try { + cursor = database.rawQuery("SELECT DISTINCT key FROM attachments", + null); + + cursor.moveToFirst(); + List allKeys = new ArrayList(); + while (!cursor.isAfterLast()) { + TDBlobKey key = new TDBlobKey(cursor.getBlob(0)); + allKeys.add(key); + cursor.moveToNext(); + } + + int numDeleted = attachments.deleteBlobsExceptWithKeys(allKeys); + if (numDeleted < 0) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + Log.v(TDDatabase.TAG, "Deleted " + numDeleted + " attachments"); + + return new TDStatus(TDStatus.OK); + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error finding attachment keys in use", e); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } finally { + if (cursor != null) { + cursor.close(); + } + } + } + + /*************************************************************************************************/ + /*** TDDatabase+Insertion ***/ + /*************************************************************************************************/ + + /** DOCUMENT & REV IDS: **/ + + public static boolean isValidDocumentId(String id) { + // http://wiki.apache.org/couchdb/HTTP_Document_API#Documents + if (id == null || id.length() == 0) { + return false; + } + if (id.charAt(0) == '_') { + return (id.startsWith("_design/")); + } + return true; + // "_local/*" is not a valid document ID. Local docs have their own API + // and shouldn't get here. + } + + public static String generateDocumentId() { + return TDMisc.TDCreateUUID(); + } + + public String generateNextRevisionID(String revisionId) { + // Revision IDs have a generation count, a hyphen, and a UUID. 
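        // For example (illustrative values): a parent revision ID such as
        // "2-b1946ac9" yields "3-" followed by a fresh UUID, while a null
        // revisionId yields a "1-" prefixed ID.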
+ int generation = 0; + if (revisionId != null) { + generation = TDRevision.generationFromRevID(revisionId); + if (generation == 0) { + return null; + } + } + String digest = TDMisc.TDCreateUUID(); // TODO: Generate canonical + // digest of body + return Integer.toString(generation + 1) + "-" + digest; + } + + public long insertDocumentID(String docId) { + long rowId = -1; + try { + ContentValues args = new ContentValues(); + args.put("docid", docId); + rowId = database.insert("docs", null, args); + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error inserting document id", e); + } + return rowId; + } + + public long getOrInsertDocNumericID(String docId) { + long docNumericId = getDocNumericID(docId); + if (docNumericId == 0) { + docNumericId = insertDocumentID(docId); + } + return docNumericId; + } + + /** + * Parses the _revisions dict from a document into an array of revision ID + * strings + */ + public static List parseCouchDBRevisionHistory( + Map docProperties) { + Map revisions = (Map) docProperties + .get("_revisions"); + if (revisions == null) { + return null; + } + List revIDs = (List) revisions.get("ids"); + Integer start = (Integer) revisions.get("start"); + if (start != null) { + for (int i = 0; i < revIDs.size(); i++) { + String revID = revIDs.get(i); + revIDs.set(i, Integer.toString(start--) + "-" + revID); + } + } + return revIDs; + } + + /** INSERTION: **/ + + public byte[] encodeDocumentJSON(TDRevision rev) { + + Map origProps = rev.getProperties(); + if (origProps == null) { + return null; + } + + // Don't allow any "_"-prefixed keys. Known ones we'll ignore, unknown + // ones are an error. + Map properties = new HashMap( + origProps.size()); + for (String key : origProps.keySet()) { + if (key.startsWith("_")) { + if (!KNOWN_SPECIAL_KEYS.contains(key)) { + Log.e(TAG, "TDDatabase: Invalid top-level key '" + key + + "' in document to be inserted"); + return null; + } + } else { + properties.put(key, origProps.get(key)); + } + } + + byte[] json = null; + try { + json = TDServer.getObjectMapper().writeValueAsBytes(properties); + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error serializing " + rev + " to JSON", e); + } + return json; + } + + public void notifyChange(TDRevision rev, String source) { + Map changeNotification = new HashMap(); + changeNotification.put("rev", rev); + changeNotification.put("seq", rev.getSequence()); + if (source != null) { + changeNotification.put("source", source); + } + setChanged(); + notifyObservers(changeNotification); + } + + public long insertRevision(TDRevision rev, long docNumericID, + long parentSequence, boolean current, byte[] data) { + long rowId = 0; + try { + ContentValues args = new ContentValues(); + args.put("doc_id", docNumericID); + args.put("revid", rev.getRevId()); + if (parentSequence != 0) { + args.put("parent", parentSequence); + } + args.put("current", current); + args.put("deleted", rev.isDeleted()); + args.put("json", data); + rowId = database.insert("revs", null, args); + rev.setSequence(rowId); + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error inserting revision", e); + } + return rowId; + } + + private TDRevision putRevision(TDRevision rev, String prevRevId, + TDStatus resultStatus) { + return putRevision(rev, prevRevId, false, resultStatus); + } + + /** + * Stores a new (or initial) revision of a document. + * + * This is what's invoked by a PUT or POST. As with those, the previous + * revision ID must be supplied when necessary and the call will fail if it + * doesn't match. 
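 *
 * Example (illustrative; the property values are assumptions):
 *
 *   Map props = new HashMap();
 *   props.put("type", "note");
 *   TDStatus st = new TDStatus();
 *   TDRevision saved = db.putRevision(new TDRevision(new TDBody(props)),
 *           null, false, st);    // no docID or prevRevId: acts like a POST
 *
 * A later update would pass saved.getRevId() as prevRevId and would get a
 * 409 (conflict) status back if that revision is no longer current.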
+ * + * @param rev + * The revision to add. If the docID is null, a new UUID will be + * assigned. Its revID must be null. It must have a JSON body. + * @param prevRevId + * The ID of the revision to replace (same as the "?rev=" + * parameter to a PUT), or null if this is a new document. + * @param allowConflict + * If false, an error status 409 will be returned if the + * insertion would create a conflict, i.e. if the previous + * revision already has a child. + * @param resultStatus + * On return, an HTTP status code indicating success or failure. + * @return A new TDRevision with the docID, revID and sequence filled in + * (but no body). + */ + @SuppressWarnings("unchecked") + public TDRevision putRevision(TDRevision rev, String prevRevId, + boolean allowConflict, TDStatus resultStatus) { + // prevRevId is the rev ID being replaced, or nil if an insert + String docId = rev.getDocId(); + boolean deleted = rev.isDeleted(); + if ((rev == null) || ((prevRevId != null) && (docId == null)) + || (deleted && (docId == null)) + || ((docId != null) && !isValidDocumentId(docId))) { + resultStatus.setCode(TDStatus.BAD_REQUEST); + return null; + } + + resultStatus.setCode(TDStatus.INTERNAL_SERVER_ERROR); + beginTransaction(); + Cursor cursor = null; + + // // PART I: In which are performed lookups and validations prior to + // the insert... + + long docNumericID = (docId != null) ? getDocNumericID(docId) : 0; + long parentSequence = 0; + try { + if (prevRevId != null) { + // Replacing: make sure given prevRevID is current & find its + // sequence number: + if (docNumericID <= 0) { + resultStatus.setCode(TDStatus.NOT_FOUND); + return null; + } + + String[] args = { Long.toString(docNumericID), prevRevId }; + String additionalWhereClause = ""; + if (!allowConflict) { + additionalWhereClause = "AND current=1"; + } + + cursor = database.rawQuery( + "SELECT sequence FROM revs WHERE doc_id=? AND revid=? " + + additionalWhereClause + " LIMIT 1", args); + + if (cursor.moveToFirst()) { + parentSequence = cursor.getLong(0); + } + + if (parentSequence == 0) { + // Not found: either a 404 or a 409, depending on whether + // there is any current revision + if (!allowConflict + && existsDocumentWithIDAndRev(docId, null)) { + resultStatus.setCode(TDStatus.CONFLICT); + return null; + } else { + resultStatus.setCode(TDStatus.NOT_FOUND); + return null; + } + } + + if (validations != null && validations.size() > 0) { + // Fetch the previous revision and validate the new one + // against it: + TDRevision prevRev = new TDRevision(docId, prevRevId, false); + TDStatus status = validateRevision(rev, prevRev); + if (!status.isSuccessful()) { + resultStatus.setCode(status.getCode()); + return null; + } + } + + // Make replaced rev non-current: + ContentValues updateContent = new ContentValues(); + updateContent.put("current", 0); + database.update("revs", updateContent, "sequence=" + + parentSequence, null); + } else { + // Inserting first revision. 
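                // (prevRevId was null, so this is either a brand-new
                // document or a POST with no docID; a docID is generated
                // below if none was given.)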
+ if (deleted && (docId != null)) { + // Didn't specify a revision to delete: 404 or a 409, + // depending + if (existsDocumentWithIDAndRev(docId, null)) { + resultStatus.setCode(TDStatus.CONFLICT); + return null; + } else { + resultStatus.setCode(TDStatus.NOT_FOUND); + return null; + } + } + + // Validate: + TDStatus status = validateRevision(rev, null); + if (!status.isSuccessful()) { + resultStatus.setCode(status.getCode()); + return null; + } + + if (docId != null) { + // Inserting first revision, with docID given (PUT): + if (docNumericID <= 0) { + // Doc doesn't exist at all; create it: + docNumericID = insertDocumentID(docId); + if (docNumericID <= 0) { + return null; + } + } else { + // Doc exists; check whether current winning revision is + // deleted: + String[] args = { Long.toString(docNumericID) }; + cursor = database + .rawQuery( + "SELECT sequence, deleted FROM revs WHERE doc_id=? and current=1 ORDER BY revid DESC LIMIT 1", + args); + + if (cursor.moveToFirst()) { + boolean wasAlreadyDeleted = (cursor.getInt(1) > 0); + if (wasAlreadyDeleted) { + // Make the deleted revision no longer current: + ContentValues updateContent = new ContentValues(); + updateContent.put("current", 0); + database.update("revs", updateContent, + "sequence=" + cursor.getLong(0), null); + } else if (!allowConflict) { + // docId already exists, current not deleted, + // conflict + resultStatus.setCode(TDStatus.CONFLICT); + return null; + } + } + } + } else { + // Inserting first revision, with no docID given (POST): + // generate a unique docID: + docId = TDDatabase.generateDocumentId(); + docNumericID = insertDocumentID(docId); + if (docNumericID <= 0) { + return null; + } + } + } + + // // PART II: In which insertion occurs... + + // Bump the revID and update the JSON: + String newRevId = generateNextRevisionID(prevRevId); + byte[] data = null; + if (!rev.isDeleted()) { + data = encodeDocumentJSON(rev); + if (data == null) { + // bad or missing json + resultStatus.setCode(TDStatus.BAD_REQUEST); + return null; + } + } + + rev = rev.copyWithDocID(docId, newRevId); + + // Now insert the rev itself: + long newSequence = insertRevision(rev, docNumericID, + parentSequence, true, data); + if (newSequence == 0) { + return null; + } + + // Store any attachments: + if (attachments != null) { + TDStatus status = processAttachmentsForRevision(rev, + parentSequence); + if (!status.isSuccessful()) { + resultStatus.setCode(status.getCode()); + return null; + } + } + + // Success! + if (deleted) { + resultStatus.setCode(TDStatus.OK); + } else { + resultStatus.setCode(TDStatus.CREATED); + } + + } catch (SQLException e1) { + Log.e(TDDatabase.TAG, "Error putting revision", e1); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + endTransaction(resultStatus.isSuccessful()); + } + + // // EPILOGUE: A change notification is sent... + notifyChange(rev, null); + return rev; + } + + /** + * Inserts an already-existing revision replicated from a remote database. + * + * It must already have a revision ID. This may create a conflict! The + * revision's history must be given; ancestor revision IDs that don't + * already exist locally will create phantom revisions with no content. 
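 *
 * Example (illustrative; the revision IDs are assumptions). The history
 * list is ordered newest first and must begin with the revision being
 * inserted:
 *
 *   List history = Arrays.asList("3-ccc", "2-bbb", "1-aaa");
 *   TDStatus st = db.forceInsert(pulledRev, history,
 *           "http://example.com/sourcedb");   // source URL as a string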
+ */ + public TDStatus forceInsert(TDRevision rev, List revHistory, + String source) { + + String docId = rev.getDocId(); + String revId = rev.getRevId(); + if (!isValidDocumentId(docId) || (revId == null)) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + int historyCount = revHistory.size(); + if (historyCount == 0) { + revHistory = new ArrayList(); + revHistory.add(revId); + historyCount = 1; + } else if (!revHistory.get(0).equals(rev.getRevId())) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + boolean success = false; + beginTransaction(); + try { + // First look up all locally-known revisions of this document: + long docNumericID = getOrInsertDocNumericID(docId); + TDRevisionList localRevs = getAllRevisionsOfDocumentID(docId, + docNumericID, false); + if (localRevs == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + // Walk through the remote history in chronological order, matching + // each revision ID to + // a local revision. When the list diverges, start creating blank + // local revisions to fill + // in the local history: + long sequence = 0; + long localParentSequence = 0; + for (int i = revHistory.size() - 1; i >= 0; --i) { + revId = revHistory.get(i); + TDRevision localRev = localRevs.revWithDocIdAndRevId(docId, + revId); + if (localRev != null) { + // This revision is known locally. Remember its sequence as + // the parent of the next one: + sequence = localRev.getSequence(); + assert (sequence > 0); + localParentSequence = sequence; + } else { + // This revision isn't known, so add it: + TDRevision newRev; + byte[] data = null; + boolean current = false; + if (i == 0) { + // Hey, this is the leaf revision we're inserting: + newRev = rev; + if (!rev.isDeleted()) { + data = encodeDocumentJSON(rev); + if (data == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + } + current = true; + } else { + // It's an intermediate parent, so insert a stub: + newRev = new TDRevision(docId, revId, false); + } + + // Insert it: + sequence = insertRevision(newRev, docNumericID, sequence, + current, data); + + if (sequence <= 0) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + if (i == 0) { + // Write any changed attachments for the new revision: + TDStatus status = processAttachmentsForRevision(rev, + localParentSequence); + if (!status.isSuccessful()) { + return status; + } + } + } + } + + // Mark the latest local rev as no longer current: + if (localParentSequence > 0 && localParentSequence != sequence) { + ContentValues args = new ContentValues(); + args.put("current", 0); + String[] whereArgs = { Long.toString(localParentSequence) }; + try { + database.update("revs", args, "sequence=?", whereArgs); + } catch (SQLException e) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + } + + success = true; + } catch (SQLException e) { + endTransaction(success); + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } finally { + endTransaction(success); + } + + // Notify and return: + notifyChange(rev, source); + return new TDStatus(TDStatus.CREATED); + } + + /** VALIDATION **/ + + /** + * Define or clear a named document validation function. 
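 *
 * Example (illustrative; the validation name is an assumption and the
 * block's signature is inferred from how it is invoked below): reject any
 * document without a "type" field.
 *
 *   db.defineValidation("require-type", new TDValidationBlock() {
 *       public boolean validate(TDRevision newRevision,
 *               TDValidationContext context) {
 *           return newRevision.getProperties().containsKey("type");
 *       }
 *   });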
+ */ + public void defineValidation(String name, TDValidationBlock validationBlock) { + if (validations == null) { + validations = new HashMap(); + } + validations.put(name, validationBlock); + } + + public TDValidationBlock getValidationNamed(String name) { + TDValidationBlock result = null; + if (validations != null) { + result = validations.get(name); + } + return result; + } + + public TDStatus validateRevision(TDRevision newRev, TDRevision oldRev) { + TDStatus result = new TDStatus(TDStatus.OK); + if (validations == null || validations.size() == 0) { + return result; + } + TDValidationContextImpl context = new TDValidationContextImpl(this, + oldRev); + for (String validationName : validations.keySet()) { + TDValidationBlock validation = getValidationNamed(validationName); + if (!validation.validate(newRev, context)) { + result.setCode(context.getErrorType().getCode()); + break; + } + } + return result; + } + + /*************************************************************************************************/ + /*** TDDatabase+Replication ***/ + /*************************************************************************************************/ + + // TODO implement missing replication methods + + public List getActiveReplicators() { + return activeReplicators; + } + + public TDReplicator getActiveReplicator(URL remote, boolean push) { + if (activeReplicators != null) { + for (TDReplicator replicator : activeReplicators) { + if (replicator.getRemote().equals(remote) + && replicator.isPush() == push + && replicator.isRunning()) { + return replicator; + } + } + } + return null; + } + + public TDReplicator getReplicator(URL remote, boolean push, + String access_token, Map headers, boolean continuous, + ScheduledExecutorService workExecutor) { + TDReplicator replicator = getReplicator(remote, null, push, + access_token, headers, continuous, workExecutor); + return replicator; + } + + public TDReplicator getReplicator(URL remote, + HttpClientFactory httpClientFactory, boolean push, + String access_token, Map headers, + boolean continuous, ScheduledExecutorService workExecutor) { + TDReplicator result = getActiveReplicator(remote, push); + if (result != null) { + return result; + } + result = push ? new TDPusher(this, remote, access_token, headers, + continuous, httpClientFactory, workExecutor) : new TDPuller( + this, remote, access_token, headers, continuous, + httpClientFactory, workExecutor); + + if (activeReplicators == null) { + activeReplicators = new ArrayList(); + } + activeReplicators.add(result); + return result; + } + + public TDRevisionList getPendingRevisions(URL url, boolean push, + long lastUpdated) { + Log.d("ARTOOPULLER", String.format("Called with %d", lastUpdated)); + TDRevisionList result = new TDRevisionList(); + String[] args = { + url.toExternalForm(), + Integer.toString(push ? 1 : 0), + "" + + ((lastUpdated == -1 ? new Date().getTime() + : lastUpdated) - (60 * 1000 * 1)) }; + Cursor cursor = database + .rawQuery( + "SELECT docid, revid, deleted, sequence, lastUpdated FROM replicator_log WHERE remote=? AND push=? AND lastUpdated < ? LIMIT 100", + args); + if (cursor.moveToFirst()) { + do { + TDRevision rev = new TDRevision(cursor.getString(0), + cursor.getString(1), cursor.getInt(2) == 1 ? true + : false); + rev.setSequence(cursor.getLong(3)); + result.add(rev); + + // Log.d("ARTOOPULLER", String.format( + // "lastUpdated:%d; for doc %s, %d. 
comparision %d sec", + // lastUpdated, cursor.getString(0), cursor.getLong(4), + // (lastUpdated - cursor.getLong(4)) / 1000)); + } while (cursor.moveToNext()); + } + + if (cursor != null) { + cursor.close(); + } + return result; + } + + public Map getPendingRevisionStats() { + + Cursor cursor = database + .rawQuery( + "SELECT push, count(*) FROM replicator_log group by push", + null); + HashMap map = new HashMap(); + map.put("push", 0); + map.put("pull", 0); + + if (cursor.moveToFirst()) { + do { + if (cursor.getInt(0) == 0) { + map.put("pull", cursor.getInt(1)); + } else { + map.put("push", cursor.getInt(1)); + } + } while (cursor.moveToNext()); + } + + if (cursor != null) { + cursor.close(); + } + return map; + } + + public boolean logRevision(URL url, boolean push, TDRevision rev) { + boolean success = false; + Object[] args = { url.toExternalForm(), Integer.toString(push ? 1 : 0), + rev.getDocId(), rev.getRevId(), (rev.isDeleted() ? 1 : 0), + rev.getSequence() }; + Cursor cursor = null; + try { + database.execSQL( + "INSERT INTO replicator_log(remote, push, docid, revid, deleted, sequence, lastUpdated) VALUES(?,?,?,?,?,?,0)", + args); + cursor = database.rawQuery("SELECT changes()", null); + if (cursor.moveToFirst() && cursor.getInt(0) > 0) { + success = true; + } + } catch (SQLiteConstraintException e) { + // Trying to log a revision that is already present + // Do nothing + success = true; + } finally { + if (cursor != null) { + cursor.close(); + } + } + return success; + } + + public void updateLogRevision(URL url, boolean push, TDRevision rev, + long lastUpdated) { + Object[] args = { "" + lastUpdated, url.toExternalForm(), + Integer.toString(push ? 1 : 0), rev.getDocId(), rev.getRevId() }; + + Cursor cursor = null; + try { + database.execSQL( + "UPDATE replicator_log SET lastUpdated = ? WHERE remote=? AND push=? AND docid=? AND revid=?", + args); + } finally { + if (cursor != null) { + cursor.close(); + } + } + } + + public void removeLogForRevision(URL url, boolean push, TDRevision rev) { + Object[] args = { url.toExternalForm(), Integer.toString(push ? 1 : 0), + rev.getDocId(), rev.getRevId() }; + Cursor cursor = null; + try { + database.execSQL( + "DELETE FROM replicator_log WHERE remote=? AND push=? AND docid=? AND revid=?", + args); + cursor = database.rawQuery("SELECT changes()", null); + if (cursor.moveToFirst() && cursor.getInt(0) > 0) { + // success = true; + } + } catch (SQLiteConstraintException e) { + // Trying to log a revision that is already present + // Do nothing + // success = true; + } finally { + if (cursor != null) { + cursor.close(); + } + } + // return success; + } + + public void resetRevisions(URL url, boolean push) { + Object[] args = { url.toExternalForm(), Integer.toString(push ? 1 : 0) }; + + Cursor cursor = null; + try { + database.execSQL( + "UPDATE replicator_log SET lastUpdated = 0 WHERE remote=? AND push=?", + args); + } finally { + if (cursor != null) { + cursor.close(); + } + } + } + + public String lastSequenceWithRemoteURL(URL url, boolean push) { + Cursor cursor = null; + String result = null; + try { + String[] args = { url.toExternalForm(), + Integer.toString(push ? 1 : 0) }; + cursor = database + .rawQuery( + "SELECT last_sequence FROM replicators WHERE remote=? 
AND push=?", + args); + if (cursor.moveToFirst()) { + result = cursor.getString(0); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting last sequence", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + return result; + } + + public boolean setLastSequence(String lastSequence, URL url, boolean push) { + ContentValues values = new ContentValues(); + values.put("remote", url.toExternalForm()); + values.put("push", push); + values.put("last_sequence", lastSequence); + long newId = database.insertWithOnConflict("replicators", null, values, + SQLiteDatabase.CONFLICT_REPLACE); + return (newId == -1); + } + + public static String quote(String string) { + return string.replace("'", "''"); + } + + public static String joinQuoted(List strings) { + if (strings.size() == 0) { + return ""; + } + + String result = "'"; + boolean first = true; + for (String string : strings) { + if (first) { + first = false; + } else { + result = result + "','"; + } + result = result + quote(string); + } + result = result + "'"; + + return result; + } + + public TDRevisionList findMissingRevisions(TDRevisionList touchRevs) { + TDRevisionList removalList = new TDRevisionList(); + if (touchRevs.size() == 0) { + return removalList; + } + + String quotedDocIds = joinQuoted(touchRevs.getAllDocIds()); + String quotedRevIds = joinQuoted(touchRevs.getAllRevIds()); + + String sql = "SELECT docid, revid FROM revs, docs " + + "WHERE docid IN (" + quotedDocIds + ") AND revid in (" + + quotedRevIds + ")" + " AND revs.doc_id == docs.doc_id"; + + Cursor cursor = null; + try { + cursor = database.rawQuery(sql, null); + cursor.moveToFirst(); + while (!cursor.isAfterLast()) { + TDRevision rev = touchRevs.revWithDocIdAndRevId( + cursor.getString(0), cursor.getString(1)); + + if (rev != null) { + touchRevs.remove(rev); + removalList.add(rev); + } + + cursor.moveToNext(); + } + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error finding missing revisions", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + return removalList; + } + + /*************************************************************************************************/ + /*** TDDatabase+LocalDocs ***/ + /*************************************************************************************************/ + + public TDRevision getLocalDocument(String docID, String revID) { + TDRevision result = null; + Cursor cursor = null; + try { + String[] args = { docID }; + cursor = database.rawQuery( + "SELECT revid, json FROM localdocs WHERE docid=?", args); + if (cursor.moveToFirst()) { + String gotRevID = cursor.getString(0); + if (revID != null && (!revID.equals(gotRevID))) { + return null; + } + byte[] json = cursor.getBlob(1); + Map properties = null; + try { + properties = TDServer.getObjectMapper().readValue(json, + Map.class); + properties.put("_id", docID); + properties.put("_rev", gotRevID); + result = new TDRevision(docID, gotRevID, false); + result.setProperties(properties); + } catch (Exception e) { + Log.w(TAG, "Error parsing local doc JSON", e); + return null; + } + + } + return result; + } catch (SQLException e) { + Log.e(TDDatabase.TAG, "Error getting local document", e); + return null; + } finally { + if (cursor != null) { + cursor.close(); + } + } + } + + public TDRevision putLocalRevision(TDRevision revision, String prevRevID, + TDStatus status) { + String docID = revision.getDocId(); + if (!docID.startsWith("_local/")) { + status.setCode(TDStatus.BAD_REQUEST); + return null; + } 
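        // Local documents are stored in the "localdocs" table and use simple
        // revision IDs of the form "1-local", "2-local", ... rather than
        // UUID-based revision IDs.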
+ + if (!revision.isDeleted()) { + // PUT: + byte[] json = encodeDocumentJSON(revision); + String newRevID; + if (prevRevID != null) { + int generation = TDRevision.generationFromRevID(prevRevID); + if (generation == 0) { + status.setCode(TDStatus.BAD_REQUEST); + return null; + } + newRevID = Integer.toString(++generation) + "-local"; + ContentValues values = new ContentValues(); + values.put("revid", newRevID); + values.put("json", json); + String[] whereArgs = { docID, prevRevID }; + try { + int rowsUpdated = database.update("localdocs", values, + "docid=? AND revid=?", whereArgs); + if (rowsUpdated == 0) { + status.setCode(TDStatus.CONFLICT); + return null; + } + } catch (SQLException e) { + status.setCode(TDStatus.INTERNAL_SERVER_ERROR); + return null; + } + } else { + newRevID = "1-local"; + ContentValues values = new ContentValues(); + values.put("docid", docID); + values.put("revid", newRevID); + values.put("json", json); + try { + database.insertWithOnConflict("localdocs", null, values, + SQLiteDatabase.CONFLICT_IGNORE); + } catch (SQLException e) { + status.setCode(TDStatus.INTERNAL_SERVER_ERROR); + return null; + } + } + status.setCode(TDStatus.CREATED); + return revision.copyWithDocID(docID, newRevID); + } else { + // DELETE: + TDStatus deleteStatus = deleteLocalDocument(docID, prevRevID); + status.setCode(deleteStatus.getCode()); + return (status.isSuccessful()) ? revision : null; + } + } + + public TDStatus deleteLocalDocument(String docID, String revID) { + if (docID == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + if (revID == null) { + // Didn't specify a revision to delete: 404 or a 409, depending + return (getLocalDocument(docID, null) != null) ? new TDStatus( + TDStatus.CONFLICT) : new TDStatus(TDStatus.NOT_FOUND); + } + String[] whereArgs = { docID, revID }; + try { + int rowsDeleted = database.delete("localdocs", + "docid=? AND revid=?", whereArgs); + if (rowsDeleted == 0) { + return (getLocalDocument(docID, null) != null) ? 
new TDStatus( + TDStatus.CONFLICT) : new TDStatus(TDStatus.NOT_FOUND); + } + return new TDStatus(TDStatus.OK); + } catch (SQLException e) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + } } class TDValidationContextImpl implements TDValidationContext { - private TDDatabase database; - private TDRevision currentRevision; - private TDStatus errorType; - private String errorMessage; - - public TDValidationContextImpl(TDDatabase database, TDRevision currentRevision) { - this.database = database; - this.currentRevision = currentRevision; - this.errorType = new TDStatus(TDStatus.FORBIDDEN); - this.errorMessage = "invalid document"; - } - - @Override - public TDRevision getCurrentRevision() { - if(currentRevision != null) { - database.loadRevisionBody(currentRevision, EnumSet.noneOf(TDContentOptions.class)); - } - return currentRevision; - } - - @Override - public TDStatus getErrorType() { - return errorType; - } - - @Override - public void setErrorType(TDStatus status) { - this.errorType = status; - } - - @Override - public String getErrorMessage() { - return errorMessage; - } - - @Override - public void setErrorMessage(String message) { - this.errorMessage = message; - } + private TDDatabase database; + private TDRevision currentRevision; + private TDStatus errorType; + private String errorMessage; + + public TDValidationContextImpl(TDDatabase database, + TDRevision currentRevision) { + this.database = database; + this.currentRevision = currentRevision; + this.errorType = new TDStatus(TDStatus.FORBIDDEN); + this.errorMessage = "invalid document"; + } + + @Override + public TDRevision getCurrentRevision() { + if (currentRevision != null) { + database.loadRevisionBody(currentRevision, + EnumSet.noneOf(TDContentOptions.class)); + } + return currentRevision; + } + + @Override + public TDStatus getErrorType() { + return errorType; + } + + @Override + public void setErrorType(TDStatus status) { + this.errorType = status; + } + + @Override + public String getErrorMessage() { + return errorMessage; + } + + @Override + public void setErrorMessage(String message) { + this.errorMessage = message; + } } diff --git a/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPuller.java b/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPuller.java index 00dbf2f..d7bab9b 100644 --- a/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPuller.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPuller.java @@ -5,6 +5,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; @@ -15,7 +16,6 @@ import android.database.SQLException; import android.util.Log; -import com.couchbase.touchdb.TDBody; import com.couchbase.touchdb.TDDatabase; import com.couchbase.touchdb.TDMisc; import com.couchbase.touchdb.TDRevision; @@ -29,384 +29,463 @@ import com.couchbase.touchdb.support.TDBatchProcessor; import com.couchbase.touchdb.support.TDBatcher; import com.couchbase.touchdb.support.TDRemoteRequestCompletionBlock; -import com.couchbase.touchdb.support.TDSequenceMap; public class TDPuller extends TDReplicator implements TDChangeTrackerClient { - private static final int MAX_OPEN_HTTP_CONNECTIONS = 16; - - protected TDBatcher> downloadsToInsert; - protected List revsToPull; - protected TDChangeTracker changeTracker; - protected TDSequenceMap pendingSequences; - protected volatile int httpConnectionCount; - - public TDPuller(TDDatabase db, 
URL remote, boolean continuous, ScheduledExecutorService workExecutor) { - this(db, remote, continuous, null, workExecutor); - } - - public TDPuller(TDDatabase db, URL remote, boolean continuous, HttpClientFactory clientFactory, ScheduledExecutorService workExecutor) { - super(db, remote, continuous, clientFactory, workExecutor); - } - - @Override - public void beginReplicating() { - if(downloadsToInsert == null) { - downloadsToInsert = new TDBatcher>(workExecutor, 200, 1000, new TDBatchProcessor>() { - @Override - public void process(List> inbox) { - insertRevisions(inbox); - } - }); - } - pendingSequences = new TDSequenceMap(); - Log.w(TDDatabase.TAG, this + " starting ChangeTracker with since=" + lastSequence); - changeTracker = new TDChangeTracker(remote, continuous ? TDChangeTrackerMode.LongPoll : TDChangeTrackerMode.OneShot, lastSequence, this); - if(filterName != null) { - changeTracker.setFilterName(filterName); - if(filterParams != null) { - changeTracker.setFilterParams(filterParams); - } - } - if(!continuous) { - asyncTaskStarted(); - } - changeTracker.start(); - } - - @Override - public void stop() { - - if(!running) { - return; - } - - if(changeTracker != null) { - changeTracker.setClient(null); // stop it from calling my changeTrackerStopped() - changeTracker.stop(); - changeTracker = null; - if(!continuous) { - asyncTaskFinished(1); // balances asyncTaskStarted() in beginReplicating() - } - } - - synchronized(this) { - revsToPull = null; - } - - super.stop(); - - if(downloadsToInsert != null) { - downloadsToInsert.flush(); - } - } - - @Override - public void stopped() { - downloadsToInsert = null; - super.stopped(); - } - - // Got a _changes feed entry from the TDChangeTracker. - @Override - public void changeTrackerReceivedChange(Map change) { - String lastSequence = change.get("seq").toString(); - String docID = (String)change.get("id"); - if(docID == null) { - return; - } - if(!TDDatabase.isValidDocumentId(docID)) { - Log.w(TDDatabase.TAG, String.format("%s: Received invalid doc ID from _changes: %s", this, change)); - return; - } - boolean deleted = (change.containsKey("deleted") && ((Boolean)change.get("deleted")).equals(Boolean.TRUE)); - List> changes = (List>)change.get("changes"); - for (Map changeDict : changes) { - String revID = (String)changeDict.get("rev"); - if(revID == null) { - continue; - } - TDPulledRevision rev = new TDPulledRevision(docID, revID, deleted); - rev.setRemoteSequenceID(lastSequence); - addToInbox(rev); - } - setChangesTotal(getChangesTotal() + changes.size()); - while(revsToPull != null && revsToPull.size() > 1000) { - try { - Thread.sleep(500); - } catch(InterruptedException e) { - - } - } - } - - @Override - public void changeTrackerStopped(TDChangeTracker tracker) { - Log.w(TDDatabase.TAG, this + ": ChangeTracker stopped"); - //FIXME tracker doesnt have error right now -// if(error == null && tracker.getError() != null) { -// error = tracker.getError(); -// } - changeTracker = null; - if(batcher != null) { - batcher.flush(); - } - - asyncTaskFinished(1); - } - - @Override - public HttpClient getHttpClient() { - HttpClient httpClient = this.clientFacotry.getHttpClient(); - - return httpClient; - } - - /** - * Process a bunch of remote revisions from the _changes feed at once - */ - @Override - public void processInbox(TDRevisionList inbox) { - // Ask the local database which of the revs are not known to it: - //Log.w(TDDatabase.TAG, String.format("%s: Looking up %s", this, inbox)); - String lastInboxSequence = 
((TDPulledRevision)inbox.get(inbox.size()-1)).getRemoteSequenceID(); - int total = getChangesTotal() - inbox.size(); - if(!db.findMissingRevisions(inbox)) { - Log.w(TDDatabase.TAG, String.format("%s failed to look up local revs", this)); - inbox = null; - } - //introducing this to java version since inbox may now be null everywhere - int inboxCount = 0; - if(inbox != null) { - inboxCount = inbox.size(); - } - if(getChangesTotal() != total + inboxCount) { - setChangesTotal(total + inboxCount); - } - - if(inboxCount == 0) { - // Nothing to do. Just bump the lastSequence. - Log.w(TDDatabase.TAG, String.format("%s no new remote revisions to fetch", this)); - long seq = pendingSequences.addValue(lastInboxSequence); - pendingSequences.removeSequence(seq); - setLastSequence(pendingSequences.getCheckpointedValue()); - return; - } - - Log.v(TDDatabase.TAG, this + " fetching " + inboxCount + " remote revisions..."); - //Log.v(TDDatabase.TAG, String.format("%s fetching remote revisions %s", this, inbox)); - - // Dump the revs into the queue of revs to pull from the remote db: - synchronized (this) { - if(revsToPull == null) { - revsToPull = new ArrayList(200); - } - - for(int i=0; i < inbox.size(); i++) { - TDPulledRevision rev = (TDPulledRevision)inbox.get(i); + private static final int MAX_OPEN_HTTP_CONNECTIONS = 16; + + protected TDBatcher> downloadsToInsert; + protected List revsToPull; + protected long nextFakeSequence; + protected long maxInsertedFakeSequence; + protected TDChangeTracker changeTracker; + + protected int httpConnectionCount; + + public TDPuller(TDDatabase db, URL remote, String access_token, + Map headers, boolean continuous, + ScheduledExecutorService workExecutor) { + this(db, remote, access_token, headers, continuous, null, workExecutor); + } + + public TDPuller(TDDatabase db, URL remote, String access_token, + Map headers, boolean continuous, + HttpClientFactory clientFactory, + ScheduledExecutorService workExecutor) { + super(db, remote, access_token, headers, continuous, clientFactory, + workExecutor); + } + + @Override + public void beginReplicating() { + super.beginReplicating(); + + if (downloadsToInsert == null) { + downloadsToInsert = new TDBatcher>(workExecutor, 200, + 1000, new TDBatchProcessor>() { + @Override + public void process(List> inbox) { + insertRevisions(inbox); + } + }); + } + nextFakeSequence = maxInsertedFakeSequence = 0; + Log.w(TDDatabase.TAG, this + " starting ChangeTracker with since=" + + lastSequence); + changeTracker = new TDChangeTracker(remote, + continuous ? TDChangeTrackerMode.LongPoll + : TDChangeTrackerMode.OneShot, lastSequence, this); + if (filterName != null) { + changeTracker.setFilterName(filterName); + if (filterParams != null) { + changeTracker.setFilterParams(filterParams); + } + } + changeTracker.start(); + asyncTaskStarted(); + } + + @Override + public void stop() { + + if (!running) { + return; + } + + // Prevents NPE in the event, the app is closed before the replication + // could even start properly + if (changeTracker != null) { + changeTracker.setClient(null); // stop it from calling my + // changeTrackerStopped() + changeTracker.stop(); + changeTracker = null; + } + + synchronized (this) { + revsToPull = null; + } + + super.stop(); + + downloadsToInsert.flush(); + } + + @Override + public void stopped() { + + downloadsToInsert.flush(); + downloadsToInsert.close(); + + super.stopped(); + } + + // Got a _changes feed entry from the TDChangeTracker. 
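    // A change entry from the feed looks roughly like (values illustrative):
    //   { "seq": 1234, "id": "some-doc-id",
    //     "changes": [ { "rev": "2-abc123" } ], "deleted": true }
    // An empty map (no "id" key) means the tracker had nothing new to report.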
+ @Override + public void changeTrackerReceivedChange(Map change) { + // When there are no changes, we just send an empty map back + if (change.containsKey("id")) { + String lastSequence = change.get("seq").toString(); + String docID = (String) change.get("id"); + if (docID == null) { + return; + } + if (!TDDatabase.isValidDocumentId(docID)) { + Log.w(TDDatabase.TAG, String.format( + "%s: Received invalid doc ID from _changes: %s", this, + change)); + return; + } + boolean deleted = (change.containsKey("deleted") && ((Boolean) change + .get("deleted")).equals(Boolean.TRUE)); + List> changes = (List>) change + .get("changes"); + ArrayList revs = new ArrayList(); + for (Map changeDict : changes) { + String revID = (String) changeDict.get("rev"); + if (revID == null) { + continue; + } + TDRevision rev = new TDRevision(docID, revID, deleted); + // rev.setRemoteSequenceID(lastSequence); + rev.setSequence(++nextFakeSequence); + // addToInbox(rev); + revs.add(rev); + } + if (logRevisions(revs)) { + setChangesTotal(getChangesTotal() + changes.size()); + + // We set the sequence to ensure that changes tracker keeps + // moving forward. The docs pull eventually catches up. Filters + // are quite slow on CouchDB if you are pulling changes + // from the beginning, we want to retain as much progress we + // have + // made as possible + setLastSequence(lastSequence); + } + } + + // If we don't have anything in the buffer + if (revsToPull == null) { + super.beginReplicating(); + } else { + synchronized (revsToPull) { + if (revsToPull.size() == 0) { + super.beginReplicating(); + } + } + } + } + + @Override + public void changeTrackerStopped(TDChangeTracker tracker) { + Log.w(TDDatabase.TAG, this + ": ChangeTracker stopped"); + // FIXME tracker doesnt have error right now + // if(error == null && tracker.getError() != null) { + // error = tracker.getError(); + // } + changeTracker = null; + // if (batcher != null) { + // batcher.flush(); + // } + + // If the tracker is not working we need to stop this replicator + stop(); + + asyncTaskFinished(1); + } + + @Override + public HttpClient getHttpClient() { + HttpClient httpClient = this.clientFactory.getHttpClient(); + + return httpClient; + } + + /** + * Process a bunch of remote revisions from the _changes feed at once + */ + @Override + public void processInbox(TDRevisionList inbox) { + // Ask the local database which of the revs are not known to it: + // Log.w(TDDatabase.TAG, String.format("%s: Looking up %s", this, + // inbox)); + // String lastInboxSequence = ((TDPulledRevision) inbox + // .get(inbox.size() - 1)).getRemoteSequenceID(); + int total = getChangesTotal() - inbox.size(); + TDRevisionList removalList; + if ((removalList = db.findMissingRevisions(inbox)) == null) { + Log.w(TDDatabase.TAG, + String.format("%s failed to look up local revs", this)); + inbox = null; + } else { + // Remove all the entries we do not need to fetch + for (TDRevision rev : removalList) { + removeLogForRevision(rev); + } + } + // introducing this to java version since inbox may now be null + // everywhere + int inboxCount = 0; + if (inbox != null) { + inboxCount = inbox.size(); + } + if (getChangesTotal() != total + inboxCount) { + setChangesTotal(total + inboxCount); + } + + if (inboxCount == 0) { + // Nothing to do. Just bump the lastSequence. 
+ Log.w(TDDatabase.TAG, + String.format("%s no new remote revisions to fetch", this)); + // long seq = pendingSequences.addValue(lastInboxSequence); + // pendingSequences.removeSequence(seq); + // setLastSequence(pendingSequences.getCheckpointedValue()); + + refiller_scheduled.set(false); + return; + } + + Log.v(TDDatabase.TAG, this + " fetching " + inboxCount + + " remote revisions..."); + // Log.v(TDDatabase.TAG, + // String.format("%s fetching remote revisions %s", this, inbox)); + + // Dump the revs into the queue of revs to pull from the remote db: + synchronized (this) { + if (revsToPull == null) { + revsToPull = new ArrayList(200); + } + + for (int i = 0; i < inbox.size(); i++) { + TDRevision rev = (TDRevision) inbox.get(i); // FIXME add logic here to pull initial revs in bulk - rev.setSequence(pendingSequences.addValue(rev.getRemoteSequenceID())); - revsToPull.add(rev); - } + // rev.setSequence(pendingSequences.addValue(rev + // .getRemoteSequenceID())); + revsToPull.add(rev); + } + } + + pullRemoteRevisions(); + } + + /** + * Start up some HTTP GETs, within our limit on the maximum simultaneous + * number + * + * The entire method is not synchronized, only the portion pulling work off + * the list Important to not hold the synchronized block while we do network + * access + */ + public void pullRemoteRevisions() { + + // If we don't have any remote revisions, refill again + if (revsToPull.size() == 0) { + Log.d(getLogTag(), "Called by pullRemoteRevisions"); + scheduleRefiller(new Date().getTime()); + } else { + // resets the counter + // synchronized (refiller_scheduled) { + Log.d(getLogTag(), "refiller_scheduled flag set to false"); + refiller_scheduled.set(false); + // } } - pullRemoteRevisions(); - } - - /** - * Start up some HTTP GETs, within our limit on the maximum simultaneous number - * - * The entire method is not synchronized, only the portion pulling work off the list - * Important to not hold the synchronized block while we do network access - */ - public void pullRemoteRevisions() { - //find the work to be done in a synchronized block - List workToStartNow = new ArrayList(); - synchronized (this) { - while(httpConnectionCount + workToStartNow.size() < MAX_OPEN_HTTP_CONNECTIONS && revsToPull != null && revsToPull.size() > 0) { + // find the work to be done in a synchronized block + List workToStartNow = new ArrayList(); + synchronized (this) { + while (httpConnectionCount + workToStartNow.size() < MAX_OPEN_HTTP_CONNECTIONS + && revsToPull != null && revsToPull.size() > 0) { TDRevision work = revsToPull.remove(0); workToStartNow.add(work); } } - //actually run it outside the synchronized block - for(TDRevision work : workToStartNow) { - pullRemoteRevision(work); - } - } - - /** - * Fetches the contents of a revision from the remote db, including its parent revision ID. - * The contents are stored into rev.properties. - */ - public void pullRemoteRevision(final TDRevision rev) { - asyncTaskStarted(); - ++httpConnectionCount; - - // Construct a query. We want the revision history, and the bodies of attachments that have - // been added since the latest revisions we have locally. 
- // See: http://wiki.apache.org/couchdb/HTTP_Document_API#Getting_Attachments_With_a_Document - StringBuilder path = new StringBuilder("/" + URLEncoder.encode(rev.getDocId()) + "?rev=" + URLEncoder.encode(rev.getRevId()) + "&revs=true&attachments=true"); - List knownRevs = knownCurrentRevIDs(rev); - if(knownRevs == null) { - //this means something is wrong, possibly the replicator has shut down - asyncTaskFinished(1); - --httpConnectionCount; - return; - } - if(knownRevs.size() > 0) { - path.append("&atts_since="); - path.append(joinQuotedEscaped(knownRevs)); - } - - //create a final version of this variable for the log statement inside - //FIXME find a way to avoid this - final String pathInside = path.toString(); - sendAsyncRequest("GET", pathInside, null, new TDRemoteRequestCompletionBlock() { - - @Override - public void onCompletion(Object result, Throwable e) { - // OK, now we've got the response revision: - if(result != null) { - Map properties = (Map)result; - List history = db.parseCouchDBRevisionHistory(properties); - if(history != null) { - rev.setProperties(properties); - // Add to batcher ... eventually it will be fed to -insertRevisions:. - List toInsert = new ArrayList(); - toInsert.add(rev); - toInsert.add(history); - downloadsToInsert.queueObject(toInsert); - asyncTaskStarted(); - } else { - Log.w(TDDatabase.TAG, this + ": Missing revision history in response from " + pathInside); - setChangesProcessed(getChangesProcessed() + 1); - } - } else { - if(e != null) { - Log.e(TDDatabase.TAG, "Error pulling remote revision", e); - error = e; - } - setChangesProcessed(getChangesProcessed() + 1); - } - - // Note that we've finished this task; then start another one if there - // are still revisions waiting to be pulled: - asyncTaskFinished(1); - --httpConnectionCount; - pullRemoteRevisions(); - } - }); - - } - - /** - * This will be called when _revsToInsert fills up: - */ - public void insertRevisions(List> revs) { - Log.i(TDDatabase.TAG, this + " inserting " + revs.size() + " revisions..."); - //Log.v(TDDatabase.TAG, String.format("%s inserting %s", this, revs)); - - /* Updating self.lastSequence is tricky. It needs to be the received sequence ID of the revision for which we've successfully received and inserted (or rejected) it and all previous received revisions. That way, next time we can start tracking remote changes from that sequence ID and know we haven't missed anything. */ - /* FIX: The current code below doesn't quite achieve that: it tracks the latest sequence ID we've successfully processed, but doesn't handle failures correctly across multiple calls to -insertRevisions. I think correct behavior will require keeping an NSMutableIndexSet to track the fake-sequences of all processed revisions; then we can find the first missing index in that set and not advance lastSequence past the revision with that fake-sequence. 
*/ - Collections.sort(revs, new Comparator>() { - - public int compare(List list1, List list2) { - TDRevision reva = (TDRevision)list1.get(0); - TDRevision revb = (TDRevision)list2.get(0); - return TDMisc.TDSequenceCompare(reva.getSequence(), revb.getSequence()); - } - - }); - - if(db == null) { - asyncTaskFinished(revs.size()); - return; - } - db.beginTransaction(); - boolean success = false; - try { - for (List revAndHistory : revs) { - TDPulledRevision rev = (TDPulledRevision)revAndHistory.get(0); - long fakeSequence = rev.getSequence(); - List history = (List)revAndHistory.get(1); - // Insert the revision: - TDStatus status = db.forceInsert(rev, history, remote); - if(!status.isSuccessful()) { - if(status.getCode() == TDStatus.FORBIDDEN) { - Log.i(TDDatabase.TAG, this + ": Remote rev failed validation: " + rev); - } else { - Log.w(TDDatabase.TAG, this + " failed to write " + rev + ": status=" + status.getCode()); - error = new HttpResponseException(status.getCode(), null); - continue; - } - } - - pendingSequences.removeSequence(fakeSequence); - } - - Log.w(TDDatabase.TAG, this + " finished inserting " + revs.size() + " revisions"); - - setLastSequence(pendingSequences.getCheckpointedValue()); - - success = true; - } catch(SQLException e) { - Log.w(TDDatabase.TAG, this + ": Exception inserting revisions", e); - } finally { - db.endTransaction(success); - asyncTaskFinished(revs.size()); - } - - setChangesProcessed(getChangesProcessed() + revs.size()); - } - - List knownCurrentRevIDs(TDRevision rev) { - if(db != null) { - return db.getAllRevisionsOfDocumentID(rev.getDocId(), true).getAllRevIds(); - } - return null; - } - - public String joinQuotedEscaped(List strings) { - if(strings.size() == 0) { - return "[]"; - } - byte[] json = null; - try { - json = TDServer.getObjectMapper().writeValueAsBytes(strings); - } catch (Exception e) { - Log.w(TDDatabase.TAG, "Unable to serialize json", e); - } - return URLEncoder.encode(new String(json)); - } + // actually run it outside the synchronized block + for (TDRevision work : workToStartNow) { + pullRemoteRevision(work); + } + } + + /** + * Fetches the contents of a revision from the remote db, including its + * parent revision ID. The contents are stored into rev.properties. + */ + public void pullRemoteRevision(final TDRevision rev) { + updateLogRevision(rev, new Date().getTime()); + + asyncTaskStarted(); + ++httpConnectionCount; + + // Construct a query. We want the revision history, and the bodies of + // attachments that have + // been added since the latest revisions we have locally. 
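        // The resulting request looks roughly like (values illustrative):
        //   GET /<docid>?rev=<revid>&revs=true&attachments=true
        //       &atts_since=[...known rev IDs...]&access_token=<token>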
+ // See: + // http://wiki.apache.org/couchdb/HTTP_Document_API#Getting_Attachments_With_a_Document + StringBuilder path = new StringBuilder("/" + + URLEncoder.encode(rev.getDocId()) + "?rev=" + + URLEncoder.encode(rev.getRevId()) + + "&revs=true&attachments=true"); + List knownRevs = knownCurrentRevIDs(rev); + if (knownRevs == null) { + // this means something is wrong, possibly the replicator has shut + // down + asyncTaskFinished(1); + --httpConnectionCount; + return; + } + if (knownRevs.size() > 0) { + path.append("&atts_since="); + path.append(joinQuotedEscaped(knownRevs)); + } -} + path.append("&access_token=").append(access_token); + + // create a final version of this variable for the log statement inside + // FIXME find a way to avoid this + final String pathInside = path.toString(); + sendAsyncRequest("GET", pathInside, headers, null, + new TDRemoteRequestCompletionBlock() { + + @Override + public void onCompletion(Object result, Throwable e) { + // OK, now we've got the response revision: + if (result != null) { + Map properties = (Map) result; + List history = db + .parseCouchDBRevisionHistory(properties); + if (history != null) { + rev.setProperties(properties); + // Add to batcher ... eventually it will be fed + // to -insertRevisions:. + List toInsert = new ArrayList(); + toInsert.add(rev); + toInsert.add(history); + downloadsToInsert.queueObject(toInsert); + asyncTaskStarted(); + } else { + Log.w(TDDatabase.TAG, + this + + ": Missing revision history in response from " + + pathInside); + setChangesProcessed(getChangesProcessed() + 1); + } + } else { + if (e != null) { + Log.e(TDDatabase.TAG, + "Error pulling remote revision", e); + error = e; + } + setChangesProcessed(getChangesProcessed() + 1); + } + + // Note that we've finished this task; then start + // another one if there + // are still revisions waiting to be pulled: + asyncTaskFinished(1); + --httpConnectionCount; + pullRemoteRevisions(); + } + }); + } + + /** + * This will be called when _revsToInsert fills up: + */ + public void insertRevisions(List> revs) { + Log.i(TDDatabase.TAG, this + " inserting " + revs.size() + + " revisions..."); + // Log.v(TDDatabase.TAG, String.format("%s inserting %s", this, revs)); + + /* + * Updating self.lastSequence is tricky. It needs to be the received + * sequence ID of the revision for which we've successfully received and + * inserted (or rejected) it and all previous received revisions. That + * way, next time we can start tracking remote changes from that + * sequence ID and know we haven't missed anything. + */ + /* + * FIX: The current code below doesn't quite achieve that: it tracks the + * latest sequence ID we've successfully processed, but doesn't handle + * failures correctly across multiple calls to -insertRevisions. I think + * correct behavior will require keeping an NSMutableIndexSet to track + * the fake-sequences of all processed revisions; then we can find the + * first missing index in that set and not advance lastSequence past the + * revision with that fake-sequence. + */ + Collections.sort(revs, new Comparator>() { + + public int compare(List list1, List list2) { + TDRevision reva = (TDRevision) list1.get(0); + TDRevision revb = (TDRevision) list2.get(0); + return TDMisc.TDSequenceCompare(reva.getSequence(), + revb.getSequence()); + } -/** - * A revision received from a remote server during a pull. Tracks the opaque remote sequence ID. 
- */ -class TDPulledRevision extends TDRevision { + }); - public TDPulledRevision(TDBody body) { - super(body); - } + if (db == null) { + asyncTaskFinished(revs.size()); + return; + } + db.beginTransaction(); + boolean success = false; + try { + for (List revAndHistory : revs) { + TDRevision rev = (TDRevision) revAndHistory.get(0); + long fakeSequence = rev.getSequence(); + List history = (List) revAndHistory.get(1); + // Insert the revision: + TDStatus status = db.forceInsert(rev, history, + remote.toExternalForm()); + if (!status.isSuccessful()) { + if (status.getCode() == TDStatus.FORBIDDEN) { + Log.i(TDDatabase.TAG, this + + ": Remote rev failed validation: " + rev); + } else { + Log.w(TDDatabase.TAG, this + " failed to write " + rev + + ": status=" + status.getCode()); + error = new HttpResponseException(status.getCode(), + null); + continue; + } + } else { + removeLogForRevision(rev); + } + } - public TDPulledRevision(String docId, String revId, boolean deleted) { - super(docId, revId, deleted); - } + Log.w(TDDatabase.TAG, this + " finished inserting " + revs.size() + + " revisions"); - public TDPulledRevision(Map properties) { - super(properties); - } + success = true; + } catch (SQLException e) { + Log.w(TDDatabase.TAG, this + ": Exception inserting revisions", e); + } finally { + db.endTransaction(success); + asyncTaskFinished(revs.size()); + } - protected String remoteSequenceID; + setChangesProcessed(getChangesProcessed() + revs.size()); + } - public String getRemoteSequenceID() { - return remoteSequenceID; - } + List knownCurrentRevIDs(TDRevision rev) { + if (db != null) { + return db.getAllRevisionsOfDocumentID(rev.getDocId(), true) + .getAllRevIds(); + } + return null; + } - public void setRemoteSequenceID(String remoteSequenceID) { - this.remoteSequenceID = remoteSequenceID; - } + public String joinQuotedEscaped(List strings) { + if (strings.size() == 0) { + return "[]"; + } + byte[] json = null; + try { + json = TDServer.getObjectMapper().writeValueAsBytes(strings); + } catch (Exception e) { + Log.w(TDDatabase.TAG, "Unable to serialize json", e); + } + return URLEncoder.encode(new String(json)); + } } diff --git a/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPusher.java b/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPusher.java index 2a45d8a..fcc6db1 100644 --- a/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPusher.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDPusher.java @@ -2,6 +2,7 @@ import java.net.URL; import java.util.ArrayList; +import java.util.Date; import java.util.EnumSet; import java.util.HashMap; import java.util.List; @@ -24,217 +25,313 @@ public class TDPusher extends TDReplicator implements Observer { - private boolean createTarget; - private boolean observing; - private TDFilterBlock filter; - - public TDPusher(TDDatabase db, URL remote, boolean continuous, ScheduledExecutorService workExecutor) { - this(db, remote, continuous, null, workExecutor); - } - - public TDPusher(TDDatabase db, URL remote, boolean continuous, HttpClientFactory clientFactory, ScheduledExecutorService workExecutor) { - super(db, remote, continuous, clientFactory, workExecutor); - createTarget = false; - observing = false; - } - - public void setCreateTarget(boolean createTarget) { - this.createTarget = createTarget; - } - - public void setFilter(TDFilterBlock filter) { - this.filter = filter; - } - - @Override - public boolean isPush() { - return true; - } - - @Override - public void maybeCreateRemoteDB() { - if(!createTarget) { - 
return; - } - Log.v(TDDatabase.TAG, "Remote db might not exist; creating it..."); - sendAsyncRequest("PUT", "", null, new TDRemoteRequestCompletionBlock() { - - @Override - public void onCompletion(Object result, Throwable e) { - if(e != null && e instanceof HttpResponseException && ((HttpResponseException)e).getStatusCode() != 412) { - Log.e(TDDatabase.TAG, "Failed to create remote db", e); - error = e; - stop(); - } else { - Log.v(TDDatabase.TAG, "Created remote db"); - createTarget = false; - beginReplicating(); - } - } - - }); - } - - @Override - public void beginReplicating() { - // If we're still waiting to create the remote db, do nothing now. (This method will be - // re-invoked after that request finishes; see maybeCreateRemoteDB() above.) - if(createTarget) { - return; - } - - if(filterName != null) { - filter = db.getFilterNamed(filterName); - } - if(filterName != null && filter == null) { - Log.w(TDDatabase.TAG, String.format("%s: No TDFilterBlock registered for filter '%s'; ignoring", this, filterName));; - } - - // Process existing changes since the last push: - long lastSequenceLong = 0; - if(lastSequence != null) { - lastSequenceLong = Long.parseLong(lastSequence); - } - TDRevisionList changes = db.changesSince(lastSequenceLong, null, filter); - if(changes.size() > 0) { - processInbox(changes); - } - - // Now listen for future changes (in continuous mode): - if(continuous) { - observing = true; - db.addObserver(this); - asyncTaskStarted(); // prevents stopped() from being called when other tasks finish - } - } - - @Override - public void stop() { - stopObserving(); - super.stop(); - } - - private void stopObserving() { - if(observing) { - observing = false; - db.deleteObserver(this); - asyncTaskFinished(1); - } - } - - @Override - public void update(Observable observable, Object data) { - //make sure this came from where we expected - if(observable == db) { - Map change = (Map)data; - // Skip revisions that originally came from the database I'm syncing to: - URL source = (URL)change.get("source"); - if(source != null && source.equals(remote.toExternalForm())) { - return; - } - TDRevision rev = (TDRevision)change.get("rev"); - if(rev != null && ((filter == null) || filter.filter(rev))) { - addToInbox(rev); - } - } - - } - - @Override - public void processInbox(final TDRevisionList inbox) { - final long lastInboxSequence = inbox.get(inbox.size()-1).getSequence(); - // Generate a set of doc/rev IDs in the JSON format that _revs_diff wants: - Map> diffs = new HashMap>(); - for (TDRevision rev : inbox) { - String docID = rev.getDocId(); - List revs = diffs.get(docID); - if(revs == null) { - revs = new ArrayList(); - diffs.put(docID, revs); - } - revs.add(rev.getRevId()); - } - - // Call _revs_diff on the target db: - asyncTaskStarted(); - sendAsyncRequest("POST", "/_revs_diff", diffs, new TDRemoteRequestCompletionBlock() { - - @Override - public void onCompletion(Object response, Throwable e) { - Map results = (Map)response; - if(e != null) { - error = e; - stop(); - } else if(results.size() != 0) { - // Go through the list of local changes again, selecting the ones the destination server - // said were missing and mapping them to a JSON dictionary in the form _bulk_docs wants: - List docsToSend = new ArrayList(); - for(TDRevision rev : inbox) { - Map properties = null; - Map resultDoc = (Map)results.get(rev.getDocId()); - if(resultDoc != null) { - List revs = (List)resultDoc.get("missing"); - if(revs != null && revs.contains(rev.getRevId())) { - //remote server needs this 
revision - // Get the revision's properties - if(rev.isDeleted()) { - properties = new HashMap(); - properties.put("_id", rev.getDocId()); - properties.put("_rev", rev.getRevId()); - properties.put("_deleted", true); - } else { - // OPT: Shouldn't include all attachment bodies, just ones that have changed - // OPT: Should send docs with many or big attachments as multipart/related - TDStatus status = db.loadRevisionBody(rev, EnumSet.of(TDDatabase.TDContentOptions.TDIncludeAttachments)); - if(!status.isSuccessful()) { - Log.w(TDDatabase.TAG, String.format("%s: Couldn't get local contents of %s", this, rev)); - } else { - properties = new HashMap(rev.getProperties()); - } - } - if(properties != null) { - // Add the _revisions list: - properties.put("_revisions", db.getRevisionHistoryDict(rev)); - //now add it to the docs to send - docsToSend.add(properties); - } - } - } - } - - // Post the revisions to the destination. "new_edits":false means that the server should - // use the given _rev IDs instead of making up new ones. - final int numDocsToSend = docsToSend.size(); - Map bulkDocsBody = new HashMap(); - bulkDocsBody.put("docs", docsToSend); - bulkDocsBody.put("new_edits", false); - Log.i(TDDatabase.TAG, String.format("%s: Sending %d revisions", this, numDocsToSend)); - Log.v(TDDatabase.TAG, String.format("%s: Sending %s", this, inbox)); - setChangesTotal(getChangesTotal() + numDocsToSend); - asyncTaskStarted(); - sendAsyncRequest("POST", "/_bulk_docs", bulkDocsBody, new TDRemoteRequestCompletionBlock() { - - @Override - public void onCompletion(Object result, Throwable e) { - if(e != null) { - error = e; - } else { - Log.v(TDDatabase.TAG, String.format("%s: Sent %s", this, inbox)); - setLastSequence(String.format("%d", lastInboxSequence)); - } - setChangesProcessed(getChangesProcessed() + numDocsToSend); - asyncTaskFinished(1); - } - }); - - } else { - // If none of the revisions are new to the remote, just bump the lastSequence: - setLastSequence(String.format("%d", lastInboxSequence)); - } - asyncTaskFinished(1); - } - - }); - } + private boolean createTarget; + private boolean observing; + private TDFilterBlock filter; + + public TDPusher(TDDatabase db, URL remote, String access_token, + Map headers, boolean continuous, + ScheduledExecutorService workExecutor) { + this(db, remote, access_token, headers, continuous, null, workExecutor); + } + + public TDPusher(TDDatabase db, URL remote, String access_token, + Map headers, boolean continuous, + HttpClientFactory clientFactory, + ScheduledExecutorService workExecutor) { + super(db, remote, access_token, headers, continuous, clientFactory, + workExecutor); + createTarget = false; + observing = false; + } + + public void setCreateTarget(boolean createTarget) { + this.createTarget = createTarget; + } + + public void setFilter(TDFilterBlock filter) { + this.filter = filter; + } + + @Override + public boolean isPush() { + return true; + } + + @Override + public void maybeCreateRemoteDB() { + if (!createTarget) { + return; + } + Log.v(TDDatabase.TAG, "Remote db might not exist; creating it..."); + sendAsyncRequest("PUT", "", this.headers, null, + new TDRemoteRequestCompletionBlock() { + + @Override + public void onCompletion(Object result, Throwable e) { + if (e != null + && e instanceof HttpResponseException + && ((HttpResponseException) e).getStatusCode() != 412) { + Log.e(TDDatabase.TAG, "Failed to create remote db", + e); + error = e; + stop(); + } else { + Log.v(TDDatabase.TAG, "Created remote db"); + createTarget = false; + 
beginReplicating(); + } + } + + }); + } + + @Override + public void beginReplicating() { + + // If we're still waiting to create the remote db, do nothing now. (This + // method will be + // re-invoked after that request finishes; see maybeCreateRemoteDB() + // above.) + if (createTarget) { + return; + } + + if (filterName != null) { + filter = db.getFilterNamed(filterName); + } + if (filterName != null && filter == null) { + Log.w(TDDatabase.TAG, + String.format( + "%s: No TDFilterBlock registered for filter '%s'; ignoring", + this, filterName)); + ; + } + + // Process existing changes since the last push: + long lastSequenceLong = 0; + if (lastSequence != null) { + lastSequenceLong = Long.parseLong(lastSequence); + } + TDRevisionList changes = db + .changesSince(lastSequenceLong, null, filter); + if (changes.size() > 0) { + // Write these changes + // processInbox(changes); + if (logRevisions(changes)) { + long lastSeq = changes.get(changes.size() - 1).getSequence(); + setLastSequence(String.format("%d", lastSeq)); + } + } + + // Now listen for future changes (in continuous mode): + if (continuous) { + observing = true; + db.addObserver(this); + asyncTaskStarted(); // prevents stopped() from being called when + // other tasks finish + } + + super.beginReplicating(); + } + + @Override + public void stop() { + stopObserving(); + super.stop(); + } + + private void stopObserving() { + if (observing) { + observing = false; + db.deleteObserver(this); + asyncTaskFinished(1); + } + } + + @Override + public void update(Observable observable, Object data) { + // make sure this came from where we expected + if (observable == db) { + Map change = (Map) data; + // Skip revisions that originally came from the database I'm syncing + // to: + String source = (String) change.get("source"); + if (source != null && source.equals(remote.toExternalForm())) { + return; + } + TDRevision rev = (TDRevision) change.get("rev"); + if (rev != null && ((filter == null) || filter.filter(rev))) { + // addToInbox(rev); + + // We add it to the log and we move the counter up + if (logRevision(rev)) { + setLastSequence(String.format("%d", rev.getSequence())); + } + } + } + + super.beginReplicating(); + } + + @Override + public void processInbox(final TDRevisionList inbox) { + if (inbox.size() == 0) { + scheduleRefiller(); + return; + } else { + refiller_scheduled.set(false); + } + + final long lastInboxSequence = inbox.get(inbox.size() - 1) + .getSequence(); + // Generate a set of doc/rev IDs in the JSON format that _revs_diff + // wants: + Map> diffs = new HashMap>(); + for (TDRevision rev : inbox) { + String docID = rev.getDocId(); + List revs = diffs.get(docID); + if (revs == null) { + revs = new ArrayList(); + diffs.put(docID, revs); + } + revs.add(rev.getRevId()); + updateLogRevision(rev, new Date().getTime()); + } + + // Call _revs_diff on the target db: + asyncTaskStarted(); + sendAsyncRequest("POST", "/_revs_diff?access_token=" + access_token, + this.headers, diffs, new TDRemoteRequestCompletionBlock() { + + @Override + public void onCompletion(Object response, Throwable e) { + Map results = (Map) response; + if (e != null) { + error = e; + stop(); + } else if (results.size() != 0) { + // Go through the list of local changes again, + // selecting the ones the destination server + // said were missing and mapping them to a JSON + // dictionary in the form _bulk_docs wants: + List docsToSend = new ArrayList(); + for (TDRevision rev : inbox) { + Map properties = null; + Map resultDoc = (Map) results + 
.get(rev.getDocId()); + if (resultDoc != null) { + List revs = (List) resultDoc + .get("missing"); + if (revs != null + && revs.contains(rev.getRevId())) { + // remote server needs this revision + // Get the revision's properties + if (rev.isDeleted()) { + properties = new HashMap(); + properties.put("_id", + rev.getDocId()); + properties.put("_rev", + rev.getRevId()); + properties.put("_deleted", true); + } else { + // OPT: Shouldn't include all + // attachment bodies, just ones that + // have changed + // OPT: Should send docs with many + // or big attachments as + // multipart/related + TDStatus status = db + .loadRevisionBody( + rev, + EnumSet.of(TDDatabase.TDContentOptions.TDIncludeAttachments)); + if (!status.isSuccessful()) { + Log.w(TDDatabase.TAG, + String.format( + "%s: Couldn't get local contents of %s", + this, rev)); + } else { + properties = new HashMap( + rev.getProperties()); + } + } + if (properties != null) { + // Add the _revisions list: + properties.put( + "_revisions", + db.getRevisionHistoryDict(rev)); + // now add it to the docs to send + docsToSend.add(properties); + } + } + } + } + + // Post the revisions to the destination. + // "new_edits":false means that the server should + // use the given _rev IDs instead of making up new + // ones. + final int numDocsToSend = docsToSend.size(); + Map bulkDocsBody = new HashMap(); + bulkDocsBody.put("docs", docsToSend); + bulkDocsBody.put("new_edits", false); + bulkDocsBody.put("all_or_nothing", true); + Log.i(TDDatabase.TAG, String.format( + "%s: Sending %d revisions", this, + numDocsToSend)); + Log.v(TDDatabase.TAG, String.format( + "%s: Sending %s", this, inbox)); + setChangesTotal(getChangesTotal() + numDocsToSend); + asyncTaskStarted(); + sendAsyncRequest("POST", + "/_bulk_docs?access_token=" + access_token, headers, + bulkDocsBody, + new TDRemoteRequestCompletionBlock() { + + @Override + public void onCompletion(Object result, + Throwable e) { + if (e != null) { + error = e; + } else { + Log.v(TDDatabase.TAG, String + .format("%s: Sent %s", + this, inbox)); + // setLastSequence(String.format("%d", + // lastInboxSequence)); + db.beginTransaction(); + for (TDRevision rev : inbox) { + removeLogForRevision(rev); + } + db.endTransaction(true); + } + setChangesProcessed(getChangesProcessed() + + numDocsToSend); + asyncTaskFinished(1); + + scheduleRefiller(new Date() + .getTime()); + } + }); + + } else { + // If none of the revisions are new to the remote, + // just bump the lastSequence: + // setLastSequence(String.format("%d", + // lastInboxSequence)); + // Remove entries from replicator_log + db.beginTransaction(); + for (TDRevision rev : inbox) { + removeLogForRevision(rev); + } + db.endTransaction(true); + + scheduleRefiller(new Date().getTime()); + } + asyncTaskFinished(1); + } + + }); + } } diff --git a/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDReplicator.java b/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDReplicator.java index 9df3d8e..998f18a 100644 --- a/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDReplicator.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/replicator/TDReplicator.java @@ -2,6 +2,7 @@ import java.net.MalformedURLException; import java.net.URL; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -10,12 +11,12 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import 
org.apache.http.client.HttpClient; import org.apache.http.client.HttpResponseException; import org.apache.http.impl.client.DefaultHttpClient; -import android.os.Handler; import android.util.Log; import com.couchbase.touchdb.TDDatabase; @@ -30,322 +31,470 @@ public abstract class TDReplicator extends Observable { - private static int lastSessionID = 0; - - protected ScheduledExecutorService workExecutor; - protected TDDatabase db; - protected URL remote; - protected boolean continuous; - protected String lastSequence; - protected boolean lastSequenceChanged; - protected Map remoteCheckpoint; - protected boolean savingCheckpoint; - protected boolean overdueForSave; - protected boolean running; - protected boolean active; - protected Throwable error; - protected String sessionID; - protected TDBatcher batcher; - protected int asyncTaskCount; - private int changesProcessed; - private int changesTotal; - protected final HttpClientFactory clientFacotry; - protected String filterName; - protected Map filterParams; - protected ExecutorService remoteRequestExecutor; - - protected static final int PROCESSOR_DELAY = 500; - protected static final int INBOX_CAPACITY = 100; - - public TDReplicator(TDDatabase db, URL remote, boolean continuous, ScheduledExecutorService workExecutor) { - this(db, remote, continuous, null, workExecutor); - } - - public TDReplicator(TDDatabase db, URL remote, boolean continuous, HttpClientFactory clientFacotry, ScheduledExecutorService workExecutor) { - - this.db = db; - this.remote = remote; - this.continuous = continuous; - this.workExecutor = workExecutor; - - this.remoteRequestExecutor = Executors.newCachedThreadPool(); - - - batcher = new TDBatcher(workExecutor, INBOX_CAPACITY, PROCESSOR_DELAY, new TDBatchProcessor() { - @Override - public void process(List inbox) { - Log.v(TDDatabase.TAG, "*** " + toString() + ": BEGIN processInbox (" + inbox.size() + " sequences)"); - processInbox(new TDRevisionList(inbox)); - Log.v(TDDatabase.TAG, "*** " + toString() + ": END processInbox (lastSequence=" + lastSequence); - active = false; - } - }); - - this.clientFacotry = clientFacotry != null ? 
clientFacotry : new HttpClientFactory() { - @Override - public HttpClient getHttpClient() { - return new DefaultHttpClient(); + private static int lastSessionID = 0; + + protected ScheduledExecutorService workExecutor; + protected TDDatabase db; + protected URL remote; + protected boolean continuous; + protected String lastSequence; + protected boolean lastSequenceChanged; + protected Map remoteCheckpoint; + protected boolean savingCheckpoint; + protected boolean overdueForSave; + protected boolean running; + protected boolean active; + protected Throwable error; + protected String sessionID; + protected TDBatcher batcher; + protected int asyncTaskCount; + private int changesProcessed; + private int changesTotal; + protected final HttpClientFactory clientFactory; + protected String filterName; + protected Map filterParams; + + protected static final int PROCESSOR_DELAY = 500; + protected static final int INBOX_CAPACITY = 100; + + protected String access_token = null; + // protected AtomicBoolean pending_changes_running = new + // AtomicBoolean(false); + protected AtomicBoolean refiller_scheduled = new AtomicBoolean(false); + + private ExecutorService remoteRequestExecutor; + + protected Map headers; + + public TDReplicator(TDDatabase db, URL remote, String access_token, + Map headers, boolean continuous, + ScheduledExecutorService workExecutor) { + this(db, remote, access_token, headers, continuous, null, workExecutor); + } + + public TDReplicator(TDDatabase db, URL remote, String access_token, + Map headers, boolean continuous, + HttpClientFactory clientFacotry, + ScheduledExecutorService workExecutor) { + + this.db = db; + this.remote = remote; + this.access_token = access_token; + this.headers = headers; + this.continuous = continuous; + this.workExecutor = workExecutor; + + this.remoteRequestExecutor = Executors.newCachedThreadPool(); + + batcher = new TDBatcher(workExecutor, INBOX_CAPACITY, + PROCESSOR_DELAY, new TDBatchProcessor() { + + @Override + public void process(List inbox) { + Log.v(TDDatabase.TAG, "*** " + toString() + + ": BEGIN processInbox (" + inbox.size() + + " sequences)"); + processInbox(new TDRevisionList(inbox)); + Log.v(TDDatabase.TAG, "*** " + toString() + + ": END processInbox (lastSequence=" + + lastSequence); + active = false; + } + }); + + this.clientFactory = clientFacotry != null ? clientFacotry + : new HttpClientFactory() { + @Override + public HttpClient getHttpClient() { + return new DefaultHttpClient(); + } + }; + } + + public void setFilterName(String filterName) { + this.filterName = filterName; + } + + public void setFilterParams(Map filterParams) { + this.filterParams = filterParams; + } + + public boolean isRunning() { + return running; + } + + public URL getRemote() { + return remote; + } + + public void databaseClosing() { + saveLastSequence(); + stop(); + db = null; + } + + public String toString() { + String maskedRemoteWithoutCredentials = (remote != null ? 
remote + .toExternalForm() : ""); + maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials + .replaceAll("://.*:.*@", "://---:---@"); + String name = getClass().getSimpleName() + "[" + + maskedRemoteWithoutCredentials + "]"; + return name; + } + + public boolean isPush() { + return false; + } + + public String getLastSequence() { + return lastSequence; + } + + public void setLastSequence(String lastSequenceIn) { + if (!lastSequenceIn.equals(lastSequence)) { + Log.v(TDDatabase.TAG, toString() + ": Setting lastSequence to " + + lastSequenceIn + " from( " + lastSequence + ")"); + lastSequence = lastSequenceIn; + if (!lastSequenceChanged) { + lastSequenceChanged = true; + workExecutor.schedule(new Runnable() { + + @Override + public void run() { + saveLastSequence(); + } + }, 2 * 1000, TimeUnit.MILLISECONDS); } - }; - } - - public void setFilterName(String filterName) { - this.filterName = filterName; - } - - public void setFilterParams(Map filterParams) { - this.filterParams = filterParams; - } - - public boolean isRunning() { - return running; - } - - public URL getRemote() { - return remote; - } - - public void databaseClosing() { - saveLastSequence(); - stop(); - db = null; - } - - public String toString() { - String maskedRemoteWithoutCredentials = (remote != null ? remote.toExternalForm() : ""); - maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials.replaceAll("://.*:.*@","://---:---@"); - String name = getClass().getSimpleName() + "[" + maskedRemoteWithoutCredentials + "]"; - return name; - } - - public boolean isPush() { - return false; - } - - public String getLastSequence() { - return lastSequence; - } - - public void setLastSequence(String lastSequenceIn) { - if(!lastSequenceIn.equals(lastSequence)) { - Log.v(TDDatabase.TAG, toString() + ": Setting lastSequence to " + lastSequenceIn + " from( " + lastSequence + ")"); - lastSequence = lastSequenceIn; - if(!lastSequenceChanged) { - lastSequenceChanged = true; - workExecutor.schedule(new Runnable() { - - @Override - public void run() { - saveLastSequence(); - } - }, 2 * 1000, TimeUnit.MILLISECONDS); - } - } - } - - public int getChangesProcessed() { - return changesProcessed; - } - - public void setChangesProcessed(int processed) { - this.changesProcessed = processed; - setChanged(); - notifyObservers(); - } - - public int getChangesTotal() { - return changesTotal; - } - - public void setChangesTotal(int total) { - this.changesTotal = total; - setChanged(); - notifyObservers(); - } - - public String getSessionID() { - return sessionID; - } - - public void start() { - if(running) { - return; - } - this.sessionID = String.format("repl%03d", ++lastSessionID); - Log.v(TDDatabase.TAG, toString() + " STARTING ..."); - running = true; - lastSequence = null; - - fetchRemoteCheckpointDoc(); - } - - public abstract void beginReplicating(); - - public void stop() { - if(!running) { - return; - } - Log.v(TDDatabase.TAG, toString() + " STOPPING..."); - batcher.flush(); - continuous = false; - if(asyncTaskCount == 0) { - stopped(); - } - } - - public void stopped() { - Log.v(TDDatabase.TAG, toString() + " STOPPED"); - running = false; - this.changesProcessed = this.changesTotal = 0; - - saveLastSequence(); - - batcher = null; - db = null; - } - - public synchronized void asyncTaskStarted() { - ++asyncTaskCount; - } - - public synchronized void asyncTaskFinished(int numTasks) { - this.asyncTaskCount -= numTasks; - if(asyncTaskCount == 0) { - if(!continuous) { - stopped(); - } - } - } - - public void addToInbox(TDRevision rev) { - 
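// ---------------------------------------------------------------------------
// Illustrative sketch (not from this patch): toString() above masks embedded
// credentials before logging via the regex "://.*:.*@". A standalone example
// of what that replacement produces, with a made-up URL:
// ---------------------------------------------------------------------------
public class MaskCredentialsExample {
    public static void main(String[] args) {
        String remote = "http://user:secret@example.com/dbname";
        System.out.println(remote.replaceAll("://.*:.*@", "://---:---@"));
        // prints: http://---:---@example.com/dbname
    }
}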
if(batcher.count() == 0) { - active = true; - } - batcher.queueObject(rev); - //Log.v(TDDatabase.TAG, String.format("%s: Received #%d %s", toString(), rev.getSequence(), rev.toString())); - } - - public void processInbox(TDRevisionList inbox) { - - } - - public void sendAsyncRequest(String method, String relativePath, Object body, TDRemoteRequestCompletionBlock onCompletion) { - //Log.v(TDDatabase.TAG, String.format("%s: %s .%s", toString(), method, relativePath)); - String urlStr = remote.toExternalForm() + relativePath; - try { - URL url = new URL(urlStr); - TDRemoteRequest request = new TDRemoteRequest(workExecutor, clientFacotry, method, url, body, onCompletion); - remoteRequestExecutor.execute(request); - } catch (MalformedURLException e) { - Log.e(TDDatabase.TAG, "Malformed URL for async request", e); - } - } - - /** CHECKPOINT STORAGE: **/ - - public void maybeCreateRemoteDB() { - // TDPusher overrides this to implement the .createTarget option - } - - /** - * This is the _local document ID stored on the remote server to keep track of state. - * Its ID is based on the local database ID (the private one, to make the result unguessable) - * and the remote database's URL. - */ - public String remoteCheckpointDocID() { - if(db == null) { - return null; - } - String input = db.privateUUID() + "\n" + remote.toExternalForm() + "\n" + (isPush() ? "1" : "0"); - return TDMisc.TDHexSHA1Digest(input.getBytes()); - } - - public void fetchRemoteCheckpointDoc() { - lastSequenceChanged = false; - final String localLastSequence = db.lastSequenceWithRemoteURL(remote, isPush()); - if(localLastSequence == null) { - maybeCreateRemoteDB(); - beginReplicating(); - return; - } - - asyncTaskStarted(); - sendAsyncRequest("GET", "/_local/" + remoteCheckpointDocID(), null, new TDRemoteRequestCompletionBlock() { - - @Override - public void onCompletion(Object result, Throwable e) { - if(e != null && e instanceof HttpResponseException && ((HttpResponseException)e).getStatusCode() != 404) { - error = e; - } else { - if(e instanceof HttpResponseException && ((HttpResponseException)e).getStatusCode() == 404) { - maybeCreateRemoteDB(); - } - Map response = (Map)result; - remoteCheckpoint = response; - String remoteLastSequence = null; - if(response != null) { - remoteLastSequence = (String)response.get("lastSequence"); - } - if(remoteLastSequence != null && remoteLastSequence.equals(localLastSequence)) { - lastSequence = localLastSequence; - Log.v(TDDatabase.TAG, this + ": Replicating from lastSequence=" + lastSequence); - } else { - Log.v(TDDatabase.TAG, this + ": lastSequence mismatch: I had " + localLastSequence + ", remote had " + remoteLastSequence); - } - beginReplicating(); - } - asyncTaskFinished(1); - } - - }); - } - - public void saveLastSequence() { - if(!lastSequenceChanged) { - return; - } - if (savingCheckpoint) { - // If a save is already in progress, don't do anything. (The completion block will trigger - // another save after the first one finishes.) 
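// ---------------------------------------------------------------------------
// Illustrative sketch (not from this patch): remoteCheckpointDocID() above
// hashes the private database UUID, the remote URL and the replication
// direction into the _local checkpoint document ID. Assuming
// TDMisc.TDHexSHA1Digest is a plain hex-encoded SHA-1 digest (as its name
// suggests), the computation is equivalent to this standalone sketch; the
// UUID and URL values are made up.
// ---------------------------------------------------------------------------
import java.security.MessageDigest;

public class CheckpointDocIdExample {
    public static void main(String[] args) throws Exception {
        String privateUUID = "8f1c2d3e-made-up-uuid";   // hypothetical db.privateUUID()
        String remote = "http://example.com/dbname";
        boolean isPush = true;

        String input = privateUUID + "\n" + remote + "\n" + (isPush ? "1" : "0");
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        StringBuilder hex = new StringBuilder();
        for (byte b : sha1.digest(input.getBytes())) {
            hex.append(String.format("%02x", b));
        }
        // The checkpoint is then read and written at /_local/<hex digest>:
        System.out.println("/_local/" + hex);
    }
}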
- overdueForSave = true; - return; - } - - lastSequenceChanged = false; - overdueForSave = false; - - Log.v(TDDatabase.TAG, this + " checkpointing sequence=" + lastSequence); - final Map body = new HashMap(); - if(remoteCheckpoint != null) { - body.putAll(remoteCheckpoint); - } - body.put("lastSequence", lastSequence); - - String remoteCheckpointDocID = remoteCheckpointDocID(); - if(remoteCheckpointDocID == null) { - return; - } - savingCheckpoint = true; - sendAsyncRequest("PUT", "/_local/" + remoteCheckpointDocID, body, new TDRemoteRequestCompletionBlock() { - - @Override - public void onCompletion(Object result, Throwable e) { - savingCheckpoint = false; - if(e != null) { - Log.v(TDDatabase.TAG, this + ": Unable to save remote checkpoint", e); - // TODO: If error is 401 or 403, and this is a pull, remember that remote is read-only and don't attempt to read its checkpoint next time. - } else { - Map response = (Map)result; - body.put("_rev", response.get("rev")); - remoteCheckpoint = body; - } - if (overdueForSave) { - saveLastSequence(); - } - } - - }); - db.setLastSequence(lastSequence, remote, isPush()); - } + } + } + + public int getChangesProcessed() { + return changesProcessed; + } + + public void setChangesProcessed(int processed) { + this.changesProcessed = processed; + setChanged(); + notifyObservers(); + } + + public int getChangesTotal() { + return changesTotal; + } + + public void setChangesTotal(int total) { + this.changesTotal = total; + setChanged(); + notifyObservers(); + } + + public String getSessionID() { + return sessionID; + } + + public void start() { + if (running) { + return; + } + this.sessionID = String.format("repl%03d", ++lastSessionID); + Log.v(TDDatabase.TAG, toString() + " STARTING ..."); + running = true; + lastSequence = null; + + fetchRemoteCheckpointDoc(); + } + + public void beginReplicating() { + // This is useful for the first run after the replicator starts + Log.d(getLogTag(), "Called by ChangeTracker"); + scheduleRefiller(); + } + + public void stop() { + if (!running) { + return; + } + Log.v(TDDatabase.TAG, toString() + " STOPPING..."); + batcher.flush(); + continuous = false; + if (asyncTaskCount == 0) { + stopped(); + } + + // All the revisions that have a timestamp for this replicator are reset + // to 0. So that they get picked up in the next run. 
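// Summary of the new pending-revision flow, as read from this diff (added for
// readability, not itself part of the patch): logRevision()/logRevisions()
// record pending revisions in a replicator log kept by TDDatabase,
// processInbox() stamps them with a timestamp via updateLogRevision(), and
// removeLogForRevision() drops them once they have been pushed or inserted
// successfully. The Refill task scheduled by scheduleRefiller() re-queues
// whatever is still pending. The resetRevisions() call below clears those
// timestamps so that revisions left over from an interrupted run are picked
// up again on the next start.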
+ resetRevisions(); + } + + public void stopped() { + Log.v(TDDatabase.TAG, toString() + " STOPPED"); + running = false; + this.changesProcessed = this.changesTotal = 0; + + saveLastSequence(); + + if (db != null) { + db.getActiveReplicators().remove(this); + } + + batcher = null; + db = null; + } + + public synchronized void asyncTaskStarted() { + ++asyncTaskCount; + } + + public synchronized void asyncTaskFinished(int numTasks) { + this.asyncTaskCount -= numTasks; + if (asyncTaskCount == 0) { + if (!continuous) { + stopped(); + } + } + } + + public void addToInbox(TDRevision rev) { + if (batcher.count() == 0) { + active = true; + } + batcher.queueObject(rev); + // Log.v(TDDatabase.TAG, String.format("%s: Received #%d %s", + // toString(), rev.getSequence(), rev.toString())); + } + + public void processInbox(TDRevisionList inbox) { + + } + + public void sendAsyncRequest(String method, String relativePath, + Map headers, Object body, + TDRemoteRequestCompletionBlock onCompletion) { + // Log.v(TDDatabase.TAG, String.format("%s: %s .%s", toString(), method, + // relativePath)); + String urlStr = remote.toExternalForm() + relativePath; + try { + URL url = new URL(urlStr); + TDRemoteRequest request = new TDRemoteRequest(workExecutor, + clientFactory, method, url, headers, body, onCompletion); + remoteRequestExecutor.execute(request); + } catch (MalformedURLException e) { + Log.e(TDDatabase.TAG, "Malformed URL for async request", e); + } + } + + public boolean logRevisions(ArrayList revs) { + this.db.beginTransaction(); + boolean success = true; + for (int i = 0; i < revs.size() && success; i++) { + success = success && logRevision(revs.get(i)); + } + this.db.endTransaction(success); + return success; + } + + public boolean logRevision(TDRevision rev) { + return this.db.logRevision(this.remote, isPush(), rev); + } + + public TDRevisionList getPendingRevisions(long lastUpdated) { + return this.db.getPendingRevisions(this.remote, isPush(), lastUpdated); + } + + public void updateLogRevision(TDRevision rev, long lastUpdated) { + this.db.updateLogRevision(getRemote(), isPush(), rev, lastUpdated); + } + + public void removeLogForRevision(TDRevision rev) { + this.db.removeLogForRevision(this.remote, isPush(), rev); + } + + public void resetRevisions() { + if (this.db != null) { + this.db.resetRevisions(this.remote, isPush()); + } + } + + /** CHECKPOINT STORAGE: **/ + + public void maybeCreateRemoteDB() { + // TDPusher overrides this to implement the .createTarget option + } + + /** + * This is the _local document ID stored on the remote server to keep track + * of state. Its ID is based on the local database ID (the private one, to + * make the result unguessable) and the remote database's URL. + */ + public String remoteCheckpointDocID() { + if (db == null) { + return null; + } + String input = db.privateUUID() + "\n" + remote.toExternalForm() + "\n" + + (isPush() ? 
"1" : "0"); + return TDMisc.TDHexSHA1Digest(input.getBytes()); + } + + public void fetchRemoteCheckpointDoc() { + lastSequenceChanged = false; + final String localLastSequence = db.lastSequenceWithRemoteURL(remote, + isPush()); + if (localLastSequence == null) { + maybeCreateRemoteDB(); + beginReplicating(); + return; + } + + asyncTaskStarted(); + sendAsyncRequest("GET", "/_local/" + remoteCheckpointDocID(), + this.headers, null, new TDRemoteRequestCompletionBlock() { + + @Override + public void onCompletion(Object result, Throwable e) { + if (e != null + && e instanceof HttpResponseException + && ((HttpResponseException) e).getStatusCode() != 404) { + error = e; + } else { + if (e instanceof HttpResponseException + && ((HttpResponseException) e) + .getStatusCode() == 404) { + maybeCreateRemoteDB(); + } + Map response = (Map) result; + remoteCheckpoint = response; + String remoteLastSequence = null; + if (response != null) { + remoteLastSequence = (String) response + .get("lastSequence"); + } + if (remoteLastSequence != null + && remoteLastSequence + .equals(localLastSequence)) { + lastSequence = localLastSequence; + Log.v(TDDatabase.TAG, this + + ": Replicating from lastSequence=" + + lastSequence); + } else { + Log.v(TDDatabase.TAG, this + + ": lastSequence mismatch: I had " + + localLastSequence + ", remote had " + + remoteLastSequence); + } + beginReplicating(); + } + asyncTaskFinished(1); + } + + }); + } + + public void saveLastSequence() { + if (!lastSequenceChanged) { + return; + } + if (savingCheckpoint) { + // If a save is already in progress, don't do anything. (The + // completion block will trigger + // another save after the first one finishes.) + overdueForSave = true; + return; + } + + lastSequenceChanged = false; + overdueForSave = false; + + Log.v(TDDatabase.TAG, this + " checkpointing sequence=" + lastSequence); + final Map body = new HashMap(); + if (remoteCheckpoint != null) { + body.putAll(remoteCheckpoint); + } + body.put("lastSequence", lastSequence); + + String remoteCheckpointDocID = remoteCheckpointDocID(); + if (remoteCheckpointDocID == null) { + return; + } + savingCheckpoint = true; + sendAsyncRequest("PUT", "/_local/" + remoteCheckpointDocID, + this.headers, body, new TDRemoteRequestCompletionBlock() { + + @Override + public void onCompletion(Object result, Throwable e) { + savingCheckpoint = false; + if (e != null) { + Log.v(TDDatabase.TAG, this + + ": Unable to save remote checkpoint", e); + // TODO: If error is 401 or 403, and this is a pull, + // remember that remote is read-only and don't + // attempt to read its checkpoint next time. + } else { + Map response = (Map) result; + body.put("_rev", response.get("rev")); + remoteCheckpoint = body; + } + if (overdueForSave) { + saveLastSequence(); + } + } + + }); + db.setLastSequence(lastSequence, remote, isPush()); + } + + protected class Refill implements Runnable { + + private long lastUpdated; + + public Refill() { + this(-1); + } + + public Refill(long lastUpdated) { + this.lastUpdated = lastUpdated; + } + + @Override + public void run() { + Log.d(getLogTag(), isPush() ? "PUSH" : "PULL"); + TDRevisionList revisions = getPendingRevisions(lastUpdated); + if (revisions.size() > 0) { + for (TDRevision rev : revisions) { + batcher.queueObject(rev); + } + Log.d(getLogTag(), "Revs count: " + revisions.size() + + ", should I have set the flag?"); + } else { + // The first time we start replication, we will have zero + // changes. 
We will need to kick start replication when + // changeTracker receives changes + synchronized (refiller_scheduled) { + refiller_scheduled.set(false); + Log.d(getLogTag(), "I set scheduled to false"); + } + } + } + } + + protected void scheduleRefiller() { + scheduleRefiller(-1); + } + + protected void scheduleRefiller(long lastUpdated) { + synchronized (refiller_scheduled) { + if (!refiller_scheduled.get()) { + Log.d(getLogTag(), "started with --" + lastUpdated); + refiller_scheduled.set(true); + workExecutor.submit(new Refill(lastUpdated)); + Log.d(getLogTag(), "started with --" + lastUpdated); + } else { + Log.d(getLogTag(), "Didn't start"); + } + } + } + protected String getLogTag() { + return "ARTOOREFILLER" + (isPush() ? "PUSH" : "PULL"); + } } diff --git a/TouchDB-Android/src/com/couchbase/touchdb/replicator/changetracker/TDChangeTracker.java b/TouchDB-Android/src/com/couchbase/touchdb/replicator/changetracker/TDChangeTracker.java index 3490169..eb29c0b 100644 --- a/TouchDB-Android/src/com/couchbase/touchdb/replicator/changetracker/TDChangeTracker.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/replicator/changetracker/TDChangeTracker.java @@ -7,6 +7,7 @@ import java.net.MalformedURLException; import java.net.URL; import java.net.URLEncoder; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -43,284 +44,332 @@ */ public class TDChangeTracker implements Runnable { - private URL databaseURL; - private TDChangeTrackerClient client; - private TDChangeTrackerMode mode; - private Object lastSequenceID; - - private Thread thread; - private boolean running = false; - private HttpUriRequest request; - - private String filterName; - private Map filterParams; - - private Throwable error; - - public enum TDChangeTrackerMode { - OneShot, LongPoll, Continuous - } - - public TDChangeTracker(URL databaseURL, TDChangeTrackerMode mode, - Object lastSequenceID, TDChangeTrackerClient client) { - this.databaseURL = databaseURL; - this.mode = mode; - this.lastSequenceID = lastSequenceID; - this.client = client; - } - - public void setFilterName(String filterName) { - this.filterName = filterName; - } - - public void setFilterParams(Map filterParams) { - this.filterParams = filterParams; - } - - public void setClient(TDChangeTrackerClient client) { - this.client = client; - } - - public String getDatabaseName() { - String result = null; - if (databaseURL != null) { - result = databaseURL.getPath(); - if (result != null) { - int pathLastSlashPos = result.lastIndexOf('/'); - if (pathLastSlashPos > 0) { - result = result.substring(pathLastSlashPos); - } - } - } - return result; - } - - public String getChangesFeedPath() { - String path = "_changes?feed="; - switch (mode) { - case OneShot: - path += "normal"; - break; - case LongPoll: - path += "longpoll&limit=50"; - break; - case Continuous: - path += "continuous"; - break; - } - path += "&heartbeat=300000"; - - if(lastSequenceID != null) { - path += "&since=" + URLEncoder.encode(lastSequenceID.toString()); - } - if(filterName != null) { - path += "&filter=" + URLEncoder.encode(filterName); - if(filterParams != null) { - for (String filterParamKey : filterParams.keySet()) { - path += "&" + URLEncoder.encode(filterParamKey) + "=" + URLEncoder.encode(filterParams.get(filterParamKey).toString()); - } - } - } - - return path; - } - - public URL getChangesFeedURL() { - String dbURLString = databaseURL.toExternalForm(); - if(!dbURLString.endsWith("/")) { - dbURLString += "/"; - } - dbURLString += getChangesFeedPath(); - URL result = 
null; - try { - result = new URL(dbURLString); - } catch(MalformedURLException e) { - Log.e(TDDatabase.TAG, "Changes feed ULR is malformed", e); - } - return result; - } - - @Override - public void run() { - running = true; - HttpClient httpClient = client.getHttpClient(); - while (running) { - - URL url = getChangesFeedURL(); - request = new HttpGet(url.toString()); - - // if the URL contains user info AND if this a DefaultHttpClient - // then preemptively set the auth credentials - if(url.getUserInfo() != null) { - if(url.getUserInfo().contains(":")) { - String[] userInfoSplit = url.getUserInfo().split(":"); - final Credentials creds = new UsernamePasswordCredentials(userInfoSplit[0], userInfoSplit[1]); - if(httpClient instanceof DefaultHttpClient) { - DefaultHttpClient dhc = (DefaultHttpClient)httpClient; - - HttpRequestInterceptor preemptiveAuth = new HttpRequestInterceptor() { - - @Override - public void process(HttpRequest request, - HttpContext context) throws HttpException, - IOException { - AuthState authState = (AuthState) context.getAttribute(ClientContext.TARGET_AUTH_STATE); - CredentialsProvider credsProvider = (CredentialsProvider) context.getAttribute( - ClientContext.CREDS_PROVIDER); - HttpHost targetHost = (HttpHost) context.getAttribute(ExecutionContext.HTTP_TARGET_HOST); - - if (authState.getAuthScheme() == null) { - AuthScope authScope = new AuthScope(targetHost.getHostName(), targetHost.getPort()); - authState.setAuthScheme(new BasicScheme()); - authState.setCredentials(creds); - } - } - }; - - dhc.addRequestInterceptor(preemptiveAuth, 0); - } - } - else { - Log.w(TDDatabase.TAG, "Unable to parse user info, not setting credentials"); - } - } - - try { - String maskedRemoteWithoutCredentials = getChangesFeedURL().toString(); - maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials.replaceAll("://.*:.*@","://---:---@"); - Log.v(TDDatabase.TAG, "Making request to " + maskedRemoteWithoutCredentials); - HttpResponse response = httpClient.execute(request); - StatusLine status = response.getStatusLine(); - if(status.getStatusCode() >= 300) { - Log.e(TDDatabase.TAG, "Change tracker got error " + Integer.toString(status.getStatusCode())); - stop(); - } - HttpEntity entity = response.getEntity(); - if(entity != null) { - try { - InputStream input = entity.getContent(); - if(mode == TDChangeTrackerMode.LongPoll) { - Map fullBody = TDServer.getObjectMapper().readValue(input, Map.class); - boolean responseOK = receivedPollResponse(fullBody); - if(mode == TDChangeTrackerMode.LongPoll && responseOK) { - Log.v(TDDatabase.TAG, "Starting new longpoll"); - continue; - } else { - Log.w(TDDatabase.TAG, "Change tracker calling stop"); - stop(); - } - } - else { - BufferedReader reader = new BufferedReader(new InputStreamReader(input)); - String line = null; - while ((line=reader.readLine()) != null) { - //skip over lines which may be in a non-continuous response - if(line.equals("{\"results\":[") || line.equals("],")) { - continue; - } - else if(line.startsWith("\"last_seq\"") && mode == TDChangeTrackerMode.OneShot) { - Log.w(TDDatabase.TAG, "Change tracker calling stop"); - stop(); - break; - } - receivedChunk(line); - } - Log.v(TDDatabase.TAG, "read null from inpustream continuing"); - } - } finally { - try { entity.consumeContent(); } catch (IOException e){} - } - } - } catch (ClientProtocolException e) { - Log.e(TDDatabase.TAG, "ClientProtocolException in change tracker", e); - } catch (IOException e) { - if(running) { - //we get an exception when we're shutting down and have 
to - //close the socket underneath our read, ignore that - Log.e(TDDatabase.TAG, "IOException in change tracker", e); - } - } - } - Log.v(TDDatabase.TAG, "Change tracker run loop exiting"); - } - - public boolean receivedChunk(String line) { - if(line.length() > 1) { - try { - Map change = (Map)TDServer.getObjectMapper().readValue(line, Map.class); - if(!receivedChange(change)) { - Log.w(TDDatabase.TAG, String.format("Received unparseable change line from server: %s", line)); - return false; - } - } catch (Exception e) { - Log.w(TDDatabase.TAG, "Exception parsing JSON in change tracker", e); - return false; - } - } - return true; - } - - public boolean receivedChange(final Map change) { - Object seq = change.get("seq"); - if(seq == null) { - return false; - } - //pass the change to the client on the thread that created this change tracker - if(client != null) { - client.changeTrackerReceivedChange(change); - } - lastSequenceID = seq; - return true; - } - - public boolean receivedPollResponse(Map response) { - List> changes = (List)response.get("results"); - if(changes == null) { - return false; - } - for (Map change : changes) { - if(!receivedChange(change)) { - return false; - } - } - return true; - } - - public void setUpstreamError(String message) { - Log.w(TDDatabase.TAG, String.format("Server error: %s", message)); - this.error = new Throwable(message); - } - - public boolean start() { - this.error = null; - thread = new Thread(this, "ChangeTracker-" + databaseURL.toExternalForm()); - thread.start(); - return true; - } - - public void stop() { - Log.d(TDDatabase.TAG, "changed tracker asked to stop"); - running = false; - thread.interrupt(); - if(request != null) { - request.abort(); - } - - stopped(); - } - - public void stopped() { - Log.d(TDDatabase.TAG, "change tracker in stopped"); - if (client != null) { - Log.d(TDDatabase.TAG, "posting stopped"); - client.changeTrackerStopped(TDChangeTracker.this); - } - client = null; - Log.d(TDDatabase.TAG, "change tracker client should be null now"); - } - - public boolean isRunning() { - return running; - } + private URL databaseURL; + private TDChangeTrackerClient client; + private TDChangeTrackerMode mode; + private Object lastSequenceID; + + private Thread thread; + private boolean running = false; + private HttpUriRequest request; + + private String filterName; + private Map filterParams; + + private Throwable error; + + public enum TDChangeTrackerMode { + OneShot, LongPoll, Continuous + } + + public TDChangeTracker(URL databaseURL, TDChangeTrackerMode mode, + Object lastSequenceID, TDChangeTrackerClient client) { + this.databaseURL = databaseURL; + this.mode = mode; + this.lastSequenceID = lastSequenceID; + this.client = client; + } + + public void setFilterName(String filterName) { + this.filterName = filterName; + } + + public void setFilterParams(Map filterParams) { + this.filterParams = filterParams; + } + + public void setClient(TDChangeTrackerClient client) { + this.client = client; + } + + public String getDatabaseName() { + String result = null; + if (databaseURL != null) { + result = databaseURL.getPath(); + if (result != null) { + int pathLastSlashPos = result.lastIndexOf('/'); + if (pathLastSlashPos > 0) { + result = result.substring(pathLastSlashPos); + } + } + } + return result; + } + + public String getChangesFeedPath() { + String path = "_changes?feed="; + switch (mode) { + case OneShot: + path += "normal"; + break; + case LongPoll: + path += "longpoll&limit=50"; + break; + case Continuous: + path += "continuous"; + 
break; + } + path += "&heartbeat=300000"; + + if (lastSequenceID != null) { + path += "&since=" + URLEncoder.encode(lastSequenceID.toString()); + } + if (filterName != null) { + path += "&filter=" + URLEncoder.encode(filterName); + if (filterParams != null) { + for (String filterParamKey : filterParams.keySet()) { + path += "&" + + URLEncoder.encode(filterParamKey) + + "=" + + URLEncoder.encode(filterParams + .get(filterParamKey).toString()); + } + } + } + return path; + } + + public URL getChangesFeedURL() { + String dbURLString = databaseURL.toExternalForm(); + if (!dbURLString.endsWith("/")) { + dbURLString += "/"; + } + dbURLString += getChangesFeedPath(); + URL result = null; + try { + result = new URL(dbURLString); + } catch (MalformedURLException e) { + Log.e(TDDatabase.TAG, "Changes feed ULR is malformed", e); + } + return result; + } + + @Override + public void run() { + running = true; + HttpClient httpClient = client.getHttpClient(); + while (running) { + + URL url = getChangesFeedURL(); + request = new HttpGet(url.toString()); + + // if the URL contains user info AND if this a DefaultHttpClient + // then preemptively set the auth credentials + if (url.getUserInfo() != null) { + if (url.getUserInfo().contains(":")) { + String[] userInfoSplit = url.getUserInfo().split(":"); + final Credentials creds = new UsernamePasswordCredentials( + userInfoSplit[0], userInfoSplit[1]); + if (httpClient instanceof DefaultHttpClient) { + DefaultHttpClient dhc = (DefaultHttpClient) httpClient; + + HttpRequestInterceptor preemptiveAuth = new HttpRequestInterceptor() { + + @Override + public void process(HttpRequest request, + HttpContext context) throws HttpException, + IOException { + AuthState authState = (AuthState) context + .getAttribute(ClientContext.TARGET_AUTH_STATE); + CredentialsProvider credsProvider = (CredentialsProvider) context + .getAttribute(ClientContext.CREDS_PROVIDER); + HttpHost targetHost = (HttpHost) context + .getAttribute(ExecutionContext.HTTP_TARGET_HOST); + + if (authState.getAuthScheme() == null) { + AuthScope authScope = new AuthScope( + targetHost.getHostName(), + targetHost.getPort()); + authState.setAuthScheme(new BasicScheme()); + authState.setCredentials(creds); + } + } + }; + + dhc.addRequestInterceptor(preemptiveAuth, 0); + } + } else { + Log.w(TDDatabase.TAG, + "Unable to parse user info, not setting credentials"); + } + } + + try { + String maskedRemoteWithoutCredentials = getChangesFeedURL() + .toString(); + maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials + .replaceAll("://.*:.*@", "://---:---@"); + Log.v(TDDatabase.TAG, "Making request to " + + maskedRemoteWithoutCredentials); + HttpResponse response = httpClient.execute(request); + StatusLine status = response.getStatusLine(); + if (status.getStatusCode() >= 300) { + Log.e(TDDatabase.TAG, + "Change tracker got error " + + Integer.toString(status.getStatusCode())); + stop(); + } + HttpEntity entity = response.getEntity(); + if (entity != null) { + try { + InputStream input = entity.getContent(); + if (mode == TDChangeTrackerMode.LongPoll) { + BufferedReader reader = new BufferedReader( + new InputStreamReader(input)); + String line = null; + StringBuffer sb = new StringBuffer(); + while ((line = reader.readLine()) != null) { + sb.append(line).append("\n"); + } + String content = sb.toString(); + if ("{\"results\":[".equals(content.trim())) { + // No more pending changes; send an empty map + client.changeTrackerReceivedChange(new HashMap()); + continue; + } else { + Map fullBody = TDServer + 
.getObjectMapper().readValue(content, + Map.class); + boolean responseOK = receivedPollResponse(fullBody); + if (responseOK) { + Log.v(TDDatabase.TAG, + "Starting new longpoll"); + continue; + } else { + Log.w(TDDatabase.TAG, + "Change tracker calling stop"); + stop(); + } + } + } else { + BufferedReader reader = new BufferedReader( + new InputStreamReader(input)); + String line = null; + while ((line = reader.readLine()) != null) { + // skip over lines which may be in a + // non-continuous response + if (line.equals("{\"results\":[") + || line.equals("],")) { + continue; + } else if (line.startsWith("\"last_seq\"") + && mode == TDChangeTrackerMode.OneShot) { + Log.w(TDDatabase.TAG, + "Change tracker calling stop"); + stop(); + break; + } + receivedChunk(line); + } + Log.v(TDDatabase.TAG, + "read null from inpustream continuing"); + } + } finally { + try { + entity.consumeContent(); + } catch (IOException e) { + } + } + } + } catch (ClientProtocolException e) { + Log.e(TDDatabase.TAG, + "ClientProtocolException in change tracker", e); + stop(); + } catch (IOException e) { + if (running) { + // we get an exception when we're shutting down and have to + // close the socket underneath our read, ignore that + Log.e(TDDatabase.TAG, "IOException in change tracker", e); + // The tracker keeps trying again and again + stop(); + } + } + } + Log.v(TDDatabase.TAG, "Change tracker run loop exiting"); + } + + public boolean receivedChunk(String line) { + if (line.length() > 1) { + try { + Map change = (Map) TDServer.getObjectMapper() + .readValue(line, Map.class); + if (!receivedChange(change)) { + Log.w(TDDatabase.TAG, String.format( + "Received unparseable change line from server: %s", + line)); + return false; + } + } catch (Exception e) { + Log.w(TDDatabase.TAG, + "Exception parsing JSON in change tracker", e); + return false; + } + } + return true; + } + + public boolean receivedChange(final Map change) { + Object seq = change.get("seq"); + if (seq == null) { + return false; + } + // pass the change to the client on the thread that created this change + // tracker + if (client != null) { + client.changeTrackerReceivedChange(change); + } + lastSequenceID = seq; + return true; + } + + public boolean receivedPollResponse(Map response) { + List> changes = (List) response.get("results"); + if (changes == null) { + return false; + } + for (Map change : changes) { + if (!receivedChange(change)) { + return false; + } + } + return true; + } + + public void setUpstreamError(String message) { + Log.w(TDDatabase.TAG, String.format("Server error: %s", message)); + this.error = new Throwable(message); + } + + public boolean start() { + this.error = null; + thread = new Thread(this, "ChangeTracker-" + + databaseURL.toExternalForm()); + thread.start(); + return true; + } + + public void stop() { + Log.d(TDDatabase.TAG, "changed tracker asked to stop"); + running = false; + thread.interrupt(); + if (request != null) { + request.abort(); + } + + stopped(); + } + + public void stopped() { + Log.d(TDDatabase.TAG, "change tracker in stopped"); + if (client != null) { + Log.d(TDDatabase.TAG, "posting stopped"); + client.changeTrackerStopped(TDChangeTracker.this); + } + client = null; + Log.d(TDDatabase.TAG, "change tracker client should be null now"); + } + + public boolean isRunning() { + return running; + } } diff --git a/TouchDB-Android/src/com/couchbase/touchdb/router/TDRouter.java b/TouchDB-Android/src/com/couchbase/touchdb/router/TDRouter.java index c83542e..96b6f60 100644 --- 
a/TouchDB-Android/src/com/couchbase/touchdb/router/TDRouter.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/router/TDRouter.java @@ -41,1399 +41,1543 @@ import com.couchbase.touchdb.replicator.TDPusher; import com.couchbase.touchdb.replicator.TDReplicator; - public class TDRouter implements Observer { - private TDServer server; - private TDDatabase db; - private TDURLConnection connection; - private Map queries; - private boolean changesIncludesDocs = false; - private TDRouterCallbackBlock callbackBlock; - private boolean responseSent = false; - private boolean waiting = false; - private TDFilterBlock changesFilter; - private boolean longpoll = false; - - public static String getVersionString() { - return TouchDBVersion.TouchDBVersionNumber; - } - - public TDRouter(TDServer server, TDURLConnection connection) { - this.server = server; - this.connection = connection; - } - - public void setCallbackBlock(TDRouterCallbackBlock callbackBlock) { - this.callbackBlock = callbackBlock; - } - - public Map getQueries() { - if(queries == null) { - String queryString = connection.getURL().getQuery(); - if(queryString != null && queryString.length() > 0) { - queries = new HashMap(); - for (String component : queryString.split("&")) { - int location = component.indexOf('='); - if(location > 0) { - String key = component.substring(0, location); - String value = component.substring(location + 1); - queries.put(key, value); - } - } - - } - } - return queries; - } - - public String getQuery(String param) { - Map queries = getQueries(); - if(queries != null) { - String value = queries.get(param); - if(value != null) { - return URLDecoder.decode(value); - } - } - return null; - } - - public boolean getBooleanQuery(String param) { - String value = getQuery(param); - return (value != null) && !"false".equals(value) && !"0".equals(value); - } - - public int getIntQuery(String param, int defaultValue) { - int result = defaultValue; - String value = getQuery(param); - if(value != null) { - try { - result = Integer.parseInt(value); - } catch (NumberFormatException e) { - //ignore, will return default value - } - } - - return result; - } - - public Object getJSONQuery(String param) { - String value = getQuery(param); - if(value == null) { - return null; - } - Object result = null; - try { - result = TDServer.getObjectMapper().readValue(value, Object.class); - } catch (Exception e) { - Log.w("Unable to parse JSON Query", e); - } - return result; - } - - public boolean cacheWithEtag(String etag) { - String eTag = String.format("\"%s\"", etag); - connection.getResHeader().add("Etag", eTag); - String requestIfNoneMatch = connection.getRequestProperty("If-None-Match"); - return eTag.equals(requestIfNoneMatch); - } - - public Map getBodyAsDictionary() { - try { - InputStream contentStream = connection.getRequestInputStream(); - Map bodyMap = TDServer.getObjectMapper().readValue(contentStream, Map.class); - return bodyMap; - } catch (IOException e) { - return null; - } - } - - public EnumSet getContentOptions() { - EnumSet result = EnumSet.noneOf(TDContentOptions.class); - if(getBooleanQuery("attachments")) { - result.add(TDContentOptions.TDIncludeAttachments); - } - if(getBooleanQuery("local_seq")) { - result.add(TDContentOptions.TDIncludeLocalSeq); - } - if(getBooleanQuery("conflicts")) { - result.add(TDContentOptions.TDIncludeConflicts); - } - if(getBooleanQuery("revs")) { - result.add(TDContentOptions.TDIncludeRevs); - } - if(getBooleanQuery("revs_info")) { - result.add(TDContentOptions.TDIncludeRevsInfo); - } - 
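// ---------------------------------------------------------------------------
// Illustrative sketch (not from this patch): getQueries()/getQuery() above
// split the raw query string on '&' and '=' and URL-decode values only on
// lookup. A standalone example of that parsing, with a made-up query string:
// ---------------------------------------------------------------------------
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;

public class QueryParsingExample {
    public static void main(String[] args) {
        String queryString = "startkey=%22a%22&limit=10&descending=true";
        Map<String, String> queries = new HashMap<String, String>();
        for (String component : queryString.split("&")) {
            int location = component.indexOf('=');
            if (location > 0) {
                queries.put(component.substring(0, location),
                        component.substring(location + 1));
            }
        }
        System.out.println(URLDecoder.decode(queries.get("startkey"))); // "a"
        System.out.println(queries.get("limit"));                       // 10
    }
}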
return result; - } - - public boolean getQueryOptions(TDQueryOptions options) { - // http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options - options.setSkip(getIntQuery("skip", options.getSkip())); - options.setLimit(getIntQuery("limit", options.getLimit())); - options.setGroupLevel(getIntQuery("group_level", options.getGroupLevel())); - options.setDescending(getBooleanQuery("descending")); - options.setIncludeDocs(getBooleanQuery("include_docs")); - options.setUpdateSeq(getBooleanQuery("update_seq")); - if(getQuery("inclusive_end") != null) { - options.setInclusiveEnd(getBooleanQuery("inclusive_end")); - } - if(getQuery("reduce") != null) { - options.setReduce(getBooleanQuery("reduce")); - } - options.setGroup(getBooleanQuery("group")); - options.setContentOptions(getContentOptions()); - options.setStartKey(getJSONQuery("startkey")); - options.setEndKey(getJSONQuery("endkey")); - Object key = getJSONQuery("key"); - if(key != null) { - List keys = new ArrayList(); - keys.add(key); - options.setKeys(keys); - } - return true; - } - - public String getMultipartRequestType() { - String accept = connection.getRequestProperty("Accept"); - if(accept.startsWith("multipart/")) { - return accept; - } - return null; - } - - public TDStatus openDB() { - if(db == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - if(!db.exists()) { - return new TDStatus(TDStatus.NOT_FOUND); - } - if(!db.open()) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - return new TDStatus(TDStatus.OK); - } - - public static List splitPath(URL url) { - String pathString = url.getPath(); - if(pathString.startsWith("/")) { - pathString = pathString.substring(1); - } - List result = new ArrayList(); - //we want empty string to return empty list - if(pathString.length() == 0) { - return result; - } - for (String component : pathString.split("/")) { - result.add(URLDecoder.decode(component)); - } - return result; - } - - public void sendResponse() { - if(!responseSent) { - responseSent = true; - if(callbackBlock != null) { - callbackBlock.onResponseReady(); - } - } - } - - public void start() { - // Refer to: http://wiki.apache.org/couchdb/Complete_HTTP_API_Reference - - // We're going to map the request into a method call using reflection based on the method and path. 
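// ---------------------------------------------------------------------------
// Illustrative sketch (not from this patch): the dispatch described in the
// comment above resolves a handler by name via reflection. The reduced example
// below keeps only that pattern; the class, handler and signature here are
// hypothetical (the real handlers take TDDatabase, docID and attachmentName).
// ---------------------------------------------------------------------------
import java.lang.reflect.Method;

public class ReflectionDispatchExample {

    // Hypothetical handler; real TDRouter handlers are named do_<METHOD><suffix>.
    public String do_GET_Database(String dbName) {
        return "would list documents of " + dbName;
    }

    public static void main(String[] args) throws Exception {
        String message = String.format("do_%s", "GET") + "_Database"; // accumulated name
        ReflectionDispatchExample router = new ReflectionDispatchExample();
        Method m = router.getClass().getMethod(message, String.class);
        System.out.println(m.invoke(router, "exampledb"));
    }
}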
- // Accumulate the method name into the string 'message': - String method = connection.getRequestMethod(); - if("HEAD".equals(method)) { - method = "GET"; - } - String message = String.format("do_%s", method); - - // First interpret the components of the request: - List path = splitPath(connection.getURL()); - if(path == null) { - connection.setResponseCode(TDStatus.BAD_REQUEST); - try { - connection.getResponseOutputStream().close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "Error closing empty output stream"); - } - sendResponse(); - return; - } - - int pathLen = path.size(); - if(pathLen > 0) { - String dbName = path.get(0); - if(dbName.startsWith("_")) { - message += dbName; // special root path, like /_all_dbs - } else { - message += "_Database"; - db = server.getDatabaseNamed(dbName); - if(db == null) { - connection.setResponseCode(TDStatus.BAD_REQUEST); - try { - connection.getResponseOutputStream().close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "Error closing empty output stream"); - } - sendResponse(); - return; - } - } - } else { - message += "Root"; - } - - String docID = null; - if(db != null && pathLen > 1) { - message = message.replaceFirst("_Database", "_Document"); - // Make sure database exists, then interpret doc name: - TDStatus status = openDB(); - if(!status.isSuccessful()) { - connection.setResponseCode(status.getCode()); - try { - connection.getResponseOutputStream().close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "Error closing empty output stream"); - } - sendResponse(); - return; - } - String name = path.get(1); - if(!name.startsWith("_")) { - // Regular document - if(!TDDatabase.isValidDocumentId(name)) { - connection.setResponseCode(TDStatus.BAD_REQUEST); - try { - connection.getResponseOutputStream().close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "Error closing empty output stream"); - } - sendResponse(); - return; - } - docID = name; - } else if("_design".equals(name) || "_local".equals(name)) { - // "_design/____" and "_local/____" are document names - if(pathLen <= 2) { - connection.setResponseCode(TDStatus.NOT_FOUND); - try { - connection.getResponseOutputStream().close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "Error closing empty output stream"); - } - sendResponse(); - return; - } - docID = name + "/" + path.get(2); - path.set(1, docID); - path.remove(2); - pathLen--; - } else if(name.startsWith("_design") || name.startsWith("_local")) { - // This is also a document, just with a URL-encoded "/" - docID = name; - } else { - // Special document name like "_all_docs": - message += name; - if(pathLen > 2) { - List subList = path.subList(2, pathLen-1); - StringBuilder sb = new StringBuilder(); - Iterator iter = subList.iterator(); - while(iter.hasNext()) { - sb.append(iter.next()); - if(iter.hasNext()) { - sb.append("/"); - } - } - docID = sb.toString(); - } - } - } - - String attachmentName = null; - if(docID != null && pathLen > 2) { - message = message.replaceFirst("_Document", "_Attachment"); - // Interpret attachment name: - attachmentName = path.get(2); - if(attachmentName.startsWith("_") && docID.startsWith("_design")) { - // Design-doc attribute like _info or _view - message = message.replaceFirst("_Attachment", "_DesignDocument"); - docID = docID.substring(8); // strip the "_design/" prefix - attachmentName = pathLen > 3 ? 
path.get(3) : null; - } else { - if (pathLen > 3) { - List subList = path.subList(2, pathLen); - StringBuilder sb = new StringBuilder(); - Iterator iter = subList.iterator(); - while(iter.hasNext()) { - sb.append(iter.next()); - if(iter.hasNext()) { - //sb.append("%2F"); - sb.append("/"); - } - } - attachmentName = sb.toString(); - } - } - } - - //Log.d(TAG, "path: " + path + " message: " + message + " docID: " + docID + " attachmentName: " + attachmentName); - - // Send myself a message based on the components: - TDStatus status = new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - try { - Method m = this.getClass().getMethod(message, TDDatabase.class, String.class, String.class); - status = (TDStatus)m.invoke(this, db, docID, attachmentName); - } catch (NoSuchMethodException msme) { - try { - Method m = this.getClass().getMethod("do_UNKNOWN", TDDatabase.class, String.class, String.class); - status = (TDStatus)m.invoke(this, db, docID, attachmentName); - } catch (Exception e) { - //default status is internal server error - } - } catch (Exception e) { - //default status is internal server error - Log.e(TDDatabase.TAG, "Exception in TDRouter", e); - } - - // Configure response headers: - if(status.isSuccessful() && connection.getResponseBody() == null && connection.getHeaderField("Content-Type") == null) { - connection.setResponseBody(new TDBody("{\"ok\":true}".getBytes())); - } - - if(connection.getResponseBody() != null && connection.getResponseBody().isValidJSON()) { - connection.getResHeader().add("Content-Type", "application/json"); - } - - // Check for a mismatch between the Accept request header and the response type: - String accept = connection.getRequestProperty("Accept"); - if(accept != null && !"*/*".equals(accept)) { - String responseType = connection.getBaseContentType(); - if(responseType != null && accept.indexOf(responseType) < 0) { - Log.e(TDDatabase.TAG, String.format("Error 406: Can't satisfy request Accept: %s", accept)); - status = new TDStatus(TDStatus.NOT_ACCEPTABLE); - } - } - - connection.getResHeader().add("Server", String.format("TouchDB %s", getVersionString())); - - // If response is ready (nonzero status), tell my client about it: - if(status.getCode() != 0) { - connection.setResponseCode(status.getCode()); - - if(connection.getResponseBody() != null) { - ByteArrayInputStream bais = new ByteArrayInputStream(connection.getResponseBody().getJson()); - connection.setResponseInputStream(bais); - } else { - - try { - connection.getResponseOutputStream().close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "Error closing empty output stream"); - } - } - sendResponse(); - } - } - - public void stop() { - callbackBlock = null; - if(db != null) { - db.deleteObserver(this); - } - } - - public TDStatus do_UNKNOWN(TDDatabase db, String docID, String attachmentName) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - /*************************************************************************************************/ - /*** TDRouter+Handlers ***/ - /*************************************************************************************************/ - - public void setResponseLocation(URL url) { - String location = url.toExternalForm(); - String query = url.getQuery(); - if(query != null) { - int startOfQuery = location.indexOf(query); - if(startOfQuery > 0) { - location = location.substring(0, startOfQuery); - } - } - connection.getResHeader().add("Location", location); - } - - /** SERVER REQUESTS: **/ - - public TDStatus do_GETRoot(TDDatabase _db, String _docID, String 
_attachmentName) { - Map info = new HashMap(); - info.put("TouchDB", "Welcome"); - info.put("couchdb", "Welcome"); // for compatibility - info.put("version", getVersionString()); - connection.setResponseBody(new TDBody(info)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_GET_all_dbs(TDDatabase _db, String _docID, String _attachmentName) { - List dbs = server.allDatabaseNames(); - connection.setResponseBody(new TDBody(dbs)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_GET_session(TDDatabase _db, String _docID, String _attachmentName) { - // Send back an "Admin Party"-like response - Map session= new HashMap(); - Map userCtx = new HashMap(); - String[] roles = {"_admin"}; - session.put("ok", true); - userCtx.put("name", null); - userCtx.put("roles", roles); - session.put("userCtx", userCtx); - connection.setResponseBody(new TDBody(session)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_POST_replicate(TDDatabase _db, String _docID, String _attachmentName) { - // Extract the parameters from the JSON request body: - // http://wiki.apache.org/couchdb/Replication - Map body = getBodyAsDictionary(); - if(body == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - String source = (String)body.get("source"); - String target = (String)body.get("target"); - Boolean createTargetBoolean = (Boolean)body.get("create_target"); - boolean createTarget = (createTargetBoolean != null && createTargetBoolean.booleanValue()); - Boolean continuousBoolean = (Boolean)body.get("continuous"); - boolean continuous = (continuousBoolean != null && continuousBoolean.booleanValue()); - Boolean cancelBoolean = (Boolean)body.get("cancel"); - boolean cancel = (cancelBoolean != null && cancelBoolean.booleanValue()); - - // Map the 'source' and 'target' JSON params to a local database and remote URL: - if(source == null || target == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - boolean push = false; - TDDatabase db = server.getExistingDatabaseNamed(source); - String remoteStr = null; - if(db != null) { - remoteStr = target; - push = true; - } else { - remoteStr = source; - if(createTarget && !cancel) { - db = server.getDatabaseNamed(target); - if(!db.open()) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - } else { - db = server.getExistingDatabaseNamed(target); - } - if(db == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - } - - URL remote = null; - try { - remote = new URL(remoteStr); - } catch (MalformedURLException e) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - if(remote == null || !remote.getProtocol().startsWith("http")) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - if(!cancel) { - // Start replication: - TDReplicator repl = db.getReplicator(remote, server.getDefaultHttpClientFactory(), push, continuous, server.getWorkExecutor()); - if(repl == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - String filterName = (String)body.get("filter"); - if(filterName != null) { - repl.setFilterName(filterName); - Map filterParams = (Map)body.get("query_params"); - if(filterParams != null) { - repl.setFilterParams(filterParams); - } - } - - if(push) { - ((TDPusher)repl).setCreateTarget(createTarget); - } - repl.start(); - Map result = new HashMap(); - result.put("session_id", repl.getSessionID()); - connection.setResponseBody(new TDBody(result)); - } else { - // Cancel replication: - TDReplicator repl = db.getActiveReplicator(remote, push); - if(repl == null) { - return new 
TDStatus(TDStatus.NOT_FOUND); - } - repl.stop(); - } - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_GET_uuids(TDDatabase _db, String _docID, String _attachmentName) { - int count = Math.min(1000, getIntQuery("count", 1)); - List uuids = new ArrayList(count); - for(int i=0; i<count; i++) { - uuids.add(TDDatabase.generateDocumentId()); - } - Map result = new HashMap(); - result.put("uuids", uuids); - connection.setResponseBody(new TDBody(result)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_GET_active_tasks(TDDatabase _db, String _docID, String _attachmentName) { - // http://wiki.apache.org/couchdb/HttpGetActiveTasks - List> activities = new ArrayList>(); - for (TDDatabase db : server.allOpenDatabases()) { - List activeReplicators = db.getActiveReplicators(); - if(activeReplicators != null) { - for (TDReplicator replicator : activeReplicators) { - String source = replicator.getRemote().toExternalForm(); - String target = db.getName(); - if(replicator.isPush()) { - String tmp = source; - source = target; - target = tmp; - } - int processed = replicator.getChangesProcessed(); - int total = replicator.getChangesTotal(); - String status = String.format("Processed %d / %d changes", processed, total); - int progress = (total > 0) ? Math.round(100 * processed / (float)total) : 0; - Map activity = new HashMap(); - activity.put("type", "Replication"); - activity.put("task", replicator.getSessionID()); - activity.put("source", source); - activity.put("target", target); - activity.put("status", status); - activity.put("progress", progress); - activities.add(activity); - } - } - } - connection.setResponseBody(new TDBody(activities)); - return new TDStatus(TDStatus.OK); - } - - /** DATABASE REQUESTS: **/ - - public TDStatus do_GET_Database(TDDatabase _db, String _docID, String _attachmentName) { - // http://wiki.apache.org/couchdb/HTTP_database_API#Database_Information - TDStatus status = openDB(); - if(!status.isSuccessful()) { - return status; - } - int num_docs = db.getDocumentCount(); - long update_seq = db.getLastSequence(); - Map result = new HashMap(); - result.put("db_name", db.getName()); - result.put("db_uuid", db.publicUUID()); - result.put("doc_count", num_docs); - result.put("update_seq", update_seq); - result.put("disk_size", db.totalDataSize()); - connection.setResponseBody(new TDBody(result)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_PUT_Database(TDDatabase _db, String _docID, String _attachmentName) { - if(db.exists()) { - return new TDStatus(TDStatus.PRECONDITION_FAILED); - } - if(!db.open()) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - setResponseLocation(connection.getURL()); - return new TDStatus(TDStatus.CREATED); - } - - public TDStatus do_DELETE_Database(TDDatabase _db, String _docID, String _attachmentName) { - if(getQuery("rev") != null) { - return new TDStatus(TDStatus.BAD_REQUEST); // CouchDB checks for this; probably meant to be a document deletion - } - return server.deleteDatabaseNamed(db.getName()) ?
new TDStatus(TDStatus.OK) : new TDStatus(TDStatus.NOT_FOUND); - } - - public TDStatus do_POST_Database(TDDatabase _db, String _docID, String _attachmentName) { - TDStatus status = openDB(); - if(!status.isSuccessful()) { - return status; - } - return update(db, null, getBodyAsDictionary(), false); - } - - public TDStatus do_GET_Document_all_docs(TDDatabase _db, String _docID, String _attachmentName) { - TDQueryOptions options = new TDQueryOptions(); - if(!getQueryOptions(options)) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - Map result = db.getAllDocs(options); - if(result == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - connection.setResponseBody(new TDBody(result)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_POST_Document_all_docs(TDDatabase _db, String _docID, String _attachmentName) { - TDQueryOptions options = new TDQueryOptions(); - if (!getQueryOptions(options)) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - Map body = getBodyAsDictionary(); - if (body == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - Map result = null; - if (body.containsKey("keys") && body.get("keys") instanceof ArrayList) { - ArrayList keys = (ArrayList) body.get("keys"); - result = db.getDocsWithIDs(keys, options); - } else { - result = db.getAllDocs(options); - } - - if (result == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - connection.setResponseBody(new TDBody(result)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_POST_Document_bulk_docs(TDDatabase _db, String _docID, String _attachmentName) { - Map bodyDict = getBodyAsDictionary(); - if(bodyDict == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - List> docs = (List>) bodyDict.get("docs"); - - boolean allObj = false; - if(getQuery("all_or_nothing") == null || (getQuery("all_or_nothing") != null && (new Boolean(getQuery("all_or_nothing"))))) { - allObj = true; - } - // allowConflict If false, an error status 409 will be returned if the insertion would create a conflict, i.e. if the previous revision already has a child. 
- boolean allOrNothing = (allObj && allObj != false); - boolean noNewEdits = true; - if(getQuery("new_edits") == null || (getQuery("new_edits") != null && (new Boolean(getQuery("new_edits"))))) { - noNewEdits = false; - } - boolean ok = false; - db.beginTransaction(); - List> results = new ArrayList>(); - try { - for (Map doc : docs) { - String docID = (String) doc.get("_id"); - TDRevision rev = null; - TDStatus status = new TDStatus(TDStatus.BAD_REQUEST); - TDBody docBody = new TDBody(doc); - if (noNewEdits) { - rev = new TDRevision(docBody); - if(rev.getRevId() == null || rev.getDocId() == null || !rev.getDocId().equals(docID)) { - status = new TDStatus(TDStatus.BAD_REQUEST); - } else { - List history = TDDatabase.parseCouchDBRevisionHistory(doc); - status = db.forceInsert(rev, history, null); - } - } else { - TDStatus outStatus = new TDStatus(); - rev = update(db, docID, docBody, false, allOrNothing, outStatus); - status.setCode(outStatus.getCode()); - } - Map result = null; - if(status.isSuccessful()) { - result = new HashMap(); - result.put("ok", true); - result.put("id", docID); - if (rev != null) { - result.put("rev", rev.getRevId()); - } - } else if(allOrNothing) { - return status; // all_or_nothing backs out if there's any error - } else if(status.getCode() == TDStatus.FORBIDDEN) { - result = new HashMap(); - result.put("error", "validation failed"); - result.put("id", docID); - } else if(status.getCode() == TDStatus.CONFLICT) { - result = new HashMap(); - result.put("error", "conflict"); - result.put("id", docID); - } else { - return status; // abort the whole thing if something goes badly wrong - } - if(result != null) { - results.add(result); - } - } - Log.w(TDDatabase.TAG, String.format("%s finished inserting %d revisions in bulk", this, docs.size())); - ok = true; - } catch (Exception e) { - Log.w(TDDatabase.TAG, String.format("%s: Exception inserting revisions in bulk", this), e); - } finally { - db.endTransaction(ok); - } - Log.d(TDDatabase.TAG, "results: " + results.toString()); - connection.setResponseBody(new TDBody(results)); - return new TDStatus(TDStatus.CREATED); - } - - public TDStatus do_POST_Document_revs_diff(TDDatabase _db, String _docID, String _attachmentName) { - // http://wiki.apache.org/couchdb/HttpPostRevsDiff - // Collect all of the input doc/revision IDs as TDRevisions: - TDRevisionList revs = new TDRevisionList(); - Map body = getBodyAsDictionary(); - if(body == null) { - return new TDStatus(TDStatus.BAD_JSON); - } - for (String docID : body.keySet()) { - List revIDs = (List)body.get(docID); - for (String revID : revIDs) { - TDRevision rev = new TDRevision(docID, revID, false); - revs.add(rev); - } - } - - // Look them up, removing the existing ones from revs: - if(!db.findMissingRevisions(revs)) { - return new TDStatus(TDStatus.DB_ERROR); - } - - // Return the missing revs in a somewhat different format: - Map diffs = new HashMap(); - for (TDRevision rev : revs) { - String docID = rev.getDocId(); - - List missingRevs = null; - Map idObj = (Map)diffs.get(docID); - if(idObj != null) { - missingRevs = (List)idObj.get("missing"); - } else { - idObj = new HashMap(); - } - - if(missingRevs == null) { - missingRevs = new ArrayList(); - idObj.put("missing", missingRevs); - diffs.put(docID, idObj); - } - missingRevs.add(rev.getRevId()); - } - - // FIXME add support for possible_ancestors - - connection.setResponseBody(new TDBody(diffs)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_POST_Document_compact(TDDatabase _db, String _docID, String 
_attachmentName) { - TDStatus status = _db.compact(); - if (status.getCode() < 300) { - TDStatus outStatus = new TDStatus(); - outStatus.setCode(202); // CouchDB returns 202 'cause it's an async operation - return outStatus; - } else { - return status; - } - } - - public TDStatus do_POST_Document_ensure_full_commit(TDDatabase _db, String _docID, String _attachmentName) { - return new TDStatus(TDStatus.OK); - } - - /** CHANGES: **/ - - public Map changesDictForRevision(TDRevision rev) { - Map changesDict = new HashMap(); - changesDict.put("rev", rev.getRevId()); - - List> changes = new ArrayList>(); - changes.add(changesDict); - - Map result = new HashMap(); - result.put("seq", rev.getSequence()); - result.put("id", rev.getDocId()); - result.put("changes", changes); - if(rev.isDeleted()) { - result.put("deleted", true); - } - if(changesIncludesDocs) { - result.put("doc", rev.getProperties()); - } - return result; - } - - public Map responseBodyForChanges(List changes, long since) { - List> results = new ArrayList>(); - for (TDRevision rev : changes) { - Map changeDict = changesDictForRevision(rev); - results.add(changeDict); - } - if(changes.size() > 0) { - since = changes.get(changes.size() - 1).getSequence(); - } - Map result = new HashMap(); - result.put("results", results); - result.put("last_seq", since); - return result; - } - - public Map responseBodyForChangesWithConflicts(List changes, long since) { - // Assumes the changes are grouped by docID so that conflicts will be adjacent. - List> entries = new ArrayList>(); - String lastDocID = null; - Map lastEntry = null; - for (TDRevision rev : changes) { - String docID = rev.getDocId(); - if(docID.equals(lastDocID)) { - Map changesDict = new HashMap(); - changesDict.put("rev", rev.getRevId()); - List> inchanges = (List>)lastEntry.get("changes"); - inchanges.add(changesDict); - } else { - lastEntry = changesDictForRevision(rev); - entries.add(lastEntry); - lastDocID = docID; - } - } - // After collecting revisions, sort by sequence: - Collections.sort(entries, new Comparator>() { - public int compare(Map e1, Map e2) { - return TDMisc.TDSequenceCompare((Long)e1.get("seq"), (Long)e2.get("seq")); - } - }); - - Long lastSeq = (Long)entries.get(entries.size() - 1).get("seq"); - if(lastSeq == null) { - lastSeq = since; - } - - Map result = new HashMap(); - result.put("results", entries); - result.put("last_seq", lastSeq); - return result; - } - - public void sendContinuousChange(TDRevision rev) { - Map changeDict = changesDictForRevision(rev); - try { - String jsonString = TDServer.getObjectMapper().writeValueAsString(changeDict); - if(callbackBlock != null) { - byte[] json = (jsonString + "\n").getBytes(); - OutputStream os = connection.getResponseOutputStream(); - try { - os.write(json); - os.flush(); - } catch (Exception e) { - Log.e(TDDatabase.TAG, "IOException writing to internal streams", e); - } - } - } catch (Exception e) { - Log.w("Unable to serialize change to JSON", e); - } - } - - @Override - public void update(Observable observable, Object changeObject) { - if(observable == db) { - //make sure we're listening to the right events - Map changeNotification = (Map)changeObject; - - TDRevision rev = (TDRevision)changeNotification.get("rev"); - - if(changesFilter != null && !changesFilter.filter(rev)) { - return; - } - - if(longpoll) { - Log.w(TDDatabase.TAG, "TDRouter: Sending longpoll response"); - sendResponse(); - List revs = new ArrayList(); - revs.add(rev); - Map body = responseBodyForChanges(revs, 0); - if(callbackBlock != null) 
{ - byte[] data = null; - try { - data = TDServer.getObjectMapper().writeValueAsBytes(body); - } catch (Exception e) { - Log.w(TDDatabase.TAG, "Error serializing JSON", e); - } - OutputStream os = connection.getResponseOutputStream(); - try { - os.write(data); - os.close(); - } catch (IOException e) { - Log.e(TDDatabase.TAG, "IOException writing to internal streams", e); - } - } - } else { - Log.w(TDDatabase.TAG, "TDRouter: Sending continous change chunk"); - sendContinuousChange(rev); - } - - } - - } - - public TDStatus do_GET_Document_changes(TDDatabase _db, String docID, String _attachmentName) { - // http://wiki.apache.org/couchdb/HTTP_database_API#Changes - TDChangesOptions options = new TDChangesOptions(); - changesIncludesDocs = getBooleanQuery("include_docs"); - options.setIncludeDocs(changesIncludesDocs); - String style = getQuery("style"); - if(style != null && style.equals("all_docs")) { - options.setIncludeConflicts(true); - } - options.setContentOptions(getContentOptions()); - options.setSortBySequence(!options.isIncludeConflicts()); - options.setLimit(getIntQuery("limit", options.getLimit())); - - int since = getIntQuery("since", 0); - - String filterName = getQuery("filter"); - if(filterName != null) { - changesFilter = db.getFilterNamed(filterName); - if(changesFilter == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - } - - TDRevisionList changes = db.changesSince(since, options, changesFilter); - - if(changes == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - - String feed = getQuery("feed"); - longpoll = "longpoll".equals(feed); - boolean continuous = !longpoll && "continuous".equals(feed); - - if(continuous || (longpoll && changes.size() == 0)) { - connection.setChunked(true); - connection.setResponseCode(TDStatus.OK); - sendResponse(); - if(continuous) { - for (TDRevision rev : changes) { - sendContinuousChange(rev); - } - } - db.addObserver(this); - // Don't close connection; more data to come - return new TDStatus(0); - } else { - if(options.isIncludeConflicts()) { - connection.setResponseBody(new TDBody(responseBodyForChangesWithConflicts(changes, since))); - } else { - connection.setResponseBody(new TDBody(responseBodyForChanges(changes, since))); - } - return new TDStatus(TDStatus.OK); - } - } - - /** DOCUMENT REQUESTS: **/ - - public String getRevIDFromIfMatchHeader() { - String ifMatch = connection.getRequestProperty("If-Match"); - if(ifMatch == null) { - return null; - } - // Value of If-Match is an ETag, so have to trim the quotes around it: - if(ifMatch.length() > 2 && ifMatch.startsWith("\"") && ifMatch.endsWith("\"")) { - return ifMatch.substring(1,ifMatch.length() - 2); - } else { - return null; - } - } - - public String setResponseEtag(TDRevision rev) { - String eTag = String.format("\"%s\"", rev.getRevId()); - connection.getResHeader().add("Etag", eTag); - return eTag; - } - - public TDStatus do_GET_Document(TDDatabase _db, String docID, String _attachmentName) { - // http://wiki.apache.org/couchdb/HTTP_Document_API#GET - boolean isLocalDoc = docID.startsWith("_local"); - EnumSet options = getContentOptions(); - String openRevsParam = getQuery("open_revs"); - if(openRevsParam == null || isLocalDoc) { - // Regular GET: - String revID = getQuery("rev"); // often null - TDRevision rev = null; - if(isLocalDoc) { - rev = db.getLocalDocument(docID, revID); - } else { - rev = db.getDocumentWithIDAndRev(docID, revID, options); - // Handle ?atts_since query by stubbing out older attachments: - //?atts_since parameter - value is a 
(URL-encoded) JSON array of one or more revision IDs. - // The response will include the content of only those attachments that changed since the given revision(s). - //(You can ask for this either in the default JSON or as multipart/related, as previously described.) - List attsSince = (List)getJSONQuery("atts_since"); - if (attsSince != null) { - String ancestorId = db.findCommonAncestorOf(rev, attsSince); - if (ancestorId != null) { - int generation = TDRevision.generationFromRevID(ancestorId); - db.stubOutAttachmentsIn(rev, generation + 1); - } - } - } - if(rev == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - if(cacheWithEtag(rev.getRevId())) { - return new TDStatus(TDStatus.NOT_MODIFIED); // set ETag and check conditional GET - } - - connection.setResponseBody(rev.getBody()); - } else { - List> result = null; - if(openRevsParam.equals("all")) { - // Get all conflicting revisions: - TDRevisionList allRevs = db.getAllRevisionsOfDocumentID(docID, true); - result = new ArrayList>(allRevs.size()); - for (TDRevision rev : allRevs) { - TDStatus status = db.loadRevisionBody(rev, options); - if(status.isSuccessful()) { - Map dict = new HashMap(); - dict.put("ok", rev.getProperties()); - result.add(dict); - } else if(status.getCode() != TDStatus.INTERNAL_SERVER_ERROR) { - Map dict = new HashMap(); - dict.put("missing", rev.getRevId()); - result.add(dict); - } else { - return status; // internal error getting revision - } - } - } else { - // ?open_revs=[...] returns an array of revisions of the document: - List openRevs = (List)getJSONQuery("open_revs"); - if(openRevs == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - result = new ArrayList>(openRevs.size()); - for (String revID : openRevs) { - TDRevision rev = db.getDocumentWithIDAndRev(docID, revID, options); - if(rev != null) { - Map dict = new HashMap(); - dict.put("ok", rev.getProperties()); - result.add(dict); - } else { - Map dict = new HashMap(); - dict.put("missing", revID); - result.add(dict); - } - } - } - String acceptMultipart = getMultipartRequestType(); - if(acceptMultipart != null) { - //FIXME figure out support for multipart - throw new UnsupportedOperationException(); - } else { - connection.setResponseBody(new TDBody(result)); - } - } - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_GET_Attachment(TDDatabase _db, String docID, String _attachmentName) { - // http://wiki.apache.org/couchdb/HTTP_Document_API#GET - EnumSet options = getContentOptions(); - options.add(TDContentOptions.TDNoBody); - String revID = getQuery("rev"); // often null - TDRevision rev = db.getDocumentWithIDAndRev(docID, revID, options); - if(rev == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - if(cacheWithEtag(rev.getRevId())) { - return new TDStatus(TDStatus.NOT_MODIFIED); // set ETag and check conditional GET - } - - String type = null; - TDStatus status = new TDStatus(); - String acceptEncoding = connection.getRequestProperty("Accept-Encoding"); - TDAttachment contents = db.getAttachmentForSequence(rev.getSequence(), _attachmentName, status); - - if (contents == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - type = contents.getContentType(); - if (type != null) { - connection.getResHeader().add("Content-Type", type); - } - if (acceptEncoding != null && acceptEncoding.equals("gzip")) { - connection.getResHeader().add("Content-Encoding", acceptEncoding); - } - - connection.setResponseInputStream(contents.getContentStream()); - return new TDStatus(TDStatus.OK); - } - - /** - * NOTE this departs from 
the iOS version, returning revision, passing status back by reference - */ - public TDRevision update(TDDatabase _db, String docID, TDBody body, boolean deleting, boolean allowConflict, TDStatus outStatus) { - boolean isLocalDoc = docID != null && docID.startsWith(("_local")); - String prevRevID = null; - - if(!deleting) { - Boolean deletingBoolean = (Boolean)body.getPropertyForKey("deleted"); - deleting = (deletingBoolean != null && deletingBoolean.booleanValue()); - if(docID == null) { - if(isLocalDoc) { - outStatus.setCode(TDStatus.METHOD_NOT_ALLOWED); - return null; - } - // POST's doc ID may come from the _id field of the JSON body, else generate a random one. - docID = (String)body.getPropertyForKey("_id"); - if(docID == null) { - if(deleting) { - outStatus.setCode(TDStatus.BAD_REQUEST); - return null; - } - docID = TDDatabase.generateDocumentId(); - } - } - // PUT's revision ID comes from the JSON body. - prevRevID = (String)body.getPropertyForKey("_rev"); - } else { - // DELETE's revision ID comes from the ?rev= query param - prevRevID = getQuery("rev"); - } - - // A backup source of revision ID is an If-Match header: - if(prevRevID == null) { - prevRevID = getRevIDFromIfMatchHeader(); - } - - TDRevision rev = new TDRevision(docID, null, deleting); - rev.setBody(body); - - TDRevision result = null; - TDStatus tmpStatus = new TDStatus(); - if(isLocalDoc) { - result = _db.putLocalRevision(rev, prevRevID, tmpStatus); - } else { - result = _db.putRevision(rev, prevRevID, allowConflict, tmpStatus); - } - outStatus.setCode(tmpStatus.getCode()); - return result; - } - - public TDStatus update(TDDatabase _db, String docID, Map bodyDict, boolean deleting) { - TDBody body = new TDBody(bodyDict); - TDStatus status = new TDStatus(); - TDRevision rev = update(_db, docID, body, deleting, false, status); - if(status.isSuccessful()) { - cacheWithEtag(rev.getRevId()); // set ETag - if(!deleting) { - URL url = connection.getURL(); - String urlString = url.toExternalForm(); - if(docID != null) { - urlString += "/" + rev.getDocId(); - try { - url = new URL(urlString); - } catch (MalformedURLException e) { - Log.w("Malformed URL", e); - } - } - setResponseLocation(url); - } - Map result = new HashMap(); - result.put("ok", true); - result.put("id", rev.getDocId()); - result.put("rev", rev.getRevId()); - connection.setResponseBody(new TDBody(result)); - } - return status; - } - - public TDStatus do_PUT_Document(TDDatabase _db, String docID, String _attachmentName) { - Map bodyDict = getBodyAsDictionary(); - if(bodyDict == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - - if(getQuery("new_edits") == null || (getQuery("new_edits") != null && (new Boolean(getQuery("new_edits"))))) { - // Regular PUT - return update(_db, docID, bodyDict, false); - } else { - // PUT with new_edits=false -- forcible insertion of existing revision: - TDBody body = new TDBody(bodyDict); - TDRevision rev = new TDRevision(body); - if(rev.getRevId() == null || rev.getDocId() == null || !rev.getDocId().equals(docID)) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - List history = TDDatabase.parseCouchDBRevisionHistory(body.getProperties()); - return db.forceInsert(rev, history, null); - } - } - - public TDStatus do_DELETE_Document(TDDatabase _db, String docID, String _attachmentName) { - return update(_db, docID, null, true); - } - - public TDStatus updateAttachment(String attachment, String docID, InputStream contentStream) { - TDStatus status = new TDStatus(); - String revID = getQuery("rev"); - if(revID == null) { 
- revID = getRevIDFromIfMatchHeader(); - } - TDRevision rev = db.updateAttachment(attachment, contentStream, connection.getRequestProperty("content-type"), - docID, revID, status); - if(status.isSuccessful()) { - Map resultDict = new HashMap(); - resultDict.put("ok", true); - resultDict.put("id", rev.getDocId()); - resultDict.put("rev", rev.getRevId()); - connection.setResponseBody(new TDBody(resultDict)); - cacheWithEtag(rev.getRevId()); - if(contentStream != null) { - setResponseLocation(connection.getURL()); - } - } - return status; - } - - public TDStatus do_PUT_Attachment(TDDatabase _db, String docID, String _attachmentName) { - return updateAttachment(_attachmentName, docID, connection.getRequestInputStream()); - } - - public TDStatus do_DELETE_Attachment(TDDatabase _db, String docID, String _attachmentName) { - return updateAttachment(_attachmentName, docID, null); - } - - /** VIEW QUERIES: **/ - - public TDView compileView(String viewName, Map viewProps) { - String language = (String)viewProps.get("language"); - if(language == null) { - language = "javascript"; - } - String mapSource = (String)viewProps.get("map"); - if(mapSource == null) { - return null; - } - TDViewMapBlock mapBlock = TDView.getCompiler().compileMapFunction(mapSource, language); - if(mapBlock == null) { - Log.w(TDDatabase.TAG, String.format("View %s has unknown map function: %s", viewName, mapSource)); - return null; - } - String reduceSource = (String)viewProps.get("reduce"); - TDViewReduceBlock reduceBlock = null; - if(reduceSource != null) { - reduceBlock = TDView.getCompiler().compileReduceFunction(reduceSource, language); - if(reduceBlock == null) { - Log.w(TDDatabase.TAG, String.format("View %s has unknown reduce function: %s", viewName, reduceBlock)); - return null; - } - } - - TDView view = db.getViewNamed(viewName); - view.setMapReduceBlocks(mapBlock, reduceBlock, "1"); - String collation = (String)viewProps.get("collation"); - if("raw".equals(collation)) { - view.setCollation(TDViewCollation.TDViewCollationRaw); - } - return view; - } - - public TDStatus queryDesignDoc(String designDoc, String viewName, List keys) { - String tdViewName = String.format("%s/%s", designDoc, viewName); - TDView view = db.getExistingViewNamed(tdViewName); - if(view == null || view.getMapBlock() == null) { - // No TouchDB view is defined, or it hasn't had a map block assigned; - // see if there's a CouchDB view definition we can compile: - TDRevision rev = db.getDocumentWithIDAndRev(String.format("_design/%s", designDoc), null, EnumSet.noneOf(TDContentOptions.class)); - if(rev == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - Map views = (Map)rev.getProperties().get("views"); - Map viewProps = (Map)views.get(viewName); - if(viewProps == null) { - return new TDStatus(TDStatus.NOT_FOUND); - } - // If there is a CouchDB view, see if it can be compiled from source: - view = compileView(tdViewName, viewProps); - if(view == null) { - return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); - } - } - - TDQueryOptions options = new TDQueryOptions(); - - //if the view contains a reduce block, it should default to reduce=true - if(view.getReduceBlock() != null) { - options.setReduce(true); - } - - if(!getQueryOptions(options)) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - if(keys != null) { - options.setKeys(keys); - } - - TDStatus status = view.updateIndex(); - if(!status.isSuccessful()) { - return status; - } - - long lastSequenceIndexed = view.getLastSequenceIndexed(); - - // Check for conditional GET and set response 
Etag header: - if(keys == null) { - long eTag = options.isIncludeDocs() ? db.getLastSequence() : lastSequenceIndexed; - if(cacheWithEtag(String.format("%d", eTag))) { - return new TDStatus(TDStatus.NOT_MODIFIED); - } - } - - List> rows = view.queryWithOptions(options, status); - if(rows == null) { - return status; - } - - Map responseBody = new HashMap(); - responseBody.put("rows", rows); - responseBody.put("total_rows", rows.size()); - responseBody.put("offset", options.getSkip()); - if(options.isUpdateSeq()) { - responseBody.put("update_seq", lastSequenceIndexed); - } - connection.setResponseBody(new TDBody(responseBody)); - return new TDStatus(TDStatus.OK); - } - - public TDStatus do_GET_DesignDocument(TDDatabase _db, String designDocID, String viewName) { - return queryDesignDoc(designDocID, viewName, null); - } - - public TDStatus do_POST_DesignDocument(TDDatabase _db, String designDocID, String viewName) { - Map bodyDict = getBodyAsDictionary(); - if(bodyDict == null) { - return new TDStatus(TDStatus.BAD_REQUEST); - } - List keys = (List) bodyDict.get("keys"); - return queryDesignDoc(designDocID, viewName, keys); - } - - @Override - public String toString() { - String url = "Unknown"; - if(connection != null && connection.getURL() != null) { - url = connection.getURL().toExternalForm(); - } - return String.format("TDRouter [%s]", url); - } + private TDServer server; + private TDDatabase db; + private TDURLConnection connection; + private Map queries; + private boolean changesIncludesDocs = false; + private TDRouterCallbackBlock callbackBlock; + private boolean responseSent = false; + private boolean waiting = false; + private TDFilterBlock changesFilter; + private boolean longpoll = false; + + public static String getVersionString() { + return TouchDBVersion.TouchDBVersionNumber; + } + + public TDRouter(TDServer server, TDURLConnection connection) { + this.server = server; + this.connection = connection; + } + + public void setCallbackBlock(TDRouterCallbackBlock callbackBlock) { + this.callbackBlock = callbackBlock; + } + + public Map getQueries() { + if (queries == null) { + String queryString = connection.getURL().getQuery(); + if (queryString != null && queryString.length() > 0) { + queries = new HashMap(); + for (String component : queryString.split("&")) { + int location = component.indexOf('='); + if (location > 0) { + String key = component.substring(0, location); + String value = component.substring(location + 1); + queries.put(key, value); + } + } + + } + } + return queries; + } + + public String getQuery(String param) { + Map queries = getQueries(); + if (queries != null) { + String value = queries.get(param); + if (value != null) { + return URLDecoder.decode(value); + } + } + return null; + } + + public boolean getBooleanQuery(String param) { + String value = getQuery(param); + return (value != null) && !"false".equals(value) && !"0".equals(value); + } + + public int getIntQuery(String param, int defaultValue) { + int result = defaultValue; + String value = getQuery(param); + if (value != null) { + try { + result = Integer.parseInt(value); + } catch (NumberFormatException e) { + // ignore, will return default value + } + } + + return result; + } + + public Object getJSONQuery(String param) { + String value = getQuery(param); + if (value == null) { + return null; + } + Object result = null; + try { + result = TDServer.getObjectMapper().readValue(value, Object.class); + } catch (Exception e) { + Log.w("Unable to parse JSON Query", e); + } + return result; + } + + public 
boolean cacheWithEtag(String etag) { + String eTag = String.format("\"%s\"", etag); + connection.getResHeader().add("Etag", eTag); + String requestIfNoneMatch = connection + .getRequestProperty("If-None-Match"); + return eTag.equals(requestIfNoneMatch); + } + + public Map getBodyAsDictionary() { + try { + InputStream contentStream = connection.getRequestInputStream(); + Map bodyMap = TDServer.getObjectMapper().readValue( + contentStream, Map.class); + return bodyMap; + } catch (IOException e) { + return null; + } + } + + public EnumSet getContentOptions() { + EnumSet result = EnumSet + .noneOf(TDContentOptions.class); + if (getBooleanQuery("attachments")) { + result.add(TDContentOptions.TDIncludeAttachments); + } + if (getBooleanQuery("local_seq")) { + result.add(TDContentOptions.TDIncludeLocalSeq); + } + if (getBooleanQuery("conflicts")) { + result.add(TDContentOptions.TDIncludeConflicts); + } + if (getBooleanQuery("revs")) { + result.add(TDContentOptions.TDIncludeRevs); + } + if (getBooleanQuery("revs_info")) { + result.add(TDContentOptions.TDIncludeRevsInfo); + } + return result; + } + + public boolean getQueryOptions(TDQueryOptions options) { + // http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options + options.setSkip(getIntQuery("skip", options.getSkip())); + options.setLimit(getIntQuery("limit", options.getLimit())); + options.setGroupLevel(getIntQuery("group_level", + options.getGroupLevel())); + options.setDescending(getBooleanQuery("descending")); + options.setIncludeDocs(getBooleanQuery("include_docs")); + options.setUpdateSeq(getBooleanQuery("update_seq")); + if (getQuery("inclusive_end") != null) { + options.setInclusiveEnd(getBooleanQuery("inclusive_end")); + } + if (getQuery("reduce") != null) { + options.setReduce(getBooleanQuery("reduce")); + } + options.setGroup(getBooleanQuery("group")); + options.setContentOptions(getContentOptions()); + options.setStartKey(getJSONQuery("startkey")); + options.setEndKey(getJSONQuery("endkey")); + Object key = getJSONQuery("key"); + if (key != null) { + List keys = new ArrayList(); + keys.add(key); + options.setKeys(keys); + } + return true; + } + + public String getMultipartRequestType() { + String accept = connection.getRequestProperty("Accept"); + if (accept.startsWith("multipart/")) { + return accept; + } + return null; + } + + public TDStatus openDB() { + if (db == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + if (!db.exists()) { + return new TDStatus(TDStatus.NOT_FOUND); + } + if (!db.open()) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + return new TDStatus(TDStatus.OK); + } + + public static List splitPath(URL url) { + String pathString = url.getPath(); + if (pathString.startsWith("/")) { + pathString = pathString.substring(1); + } + List result = new ArrayList(); + // we want empty string to return empty list + if (pathString.length() == 0) { + return result; + } + for (String component : pathString.split("/")) { + result.add(URLDecoder.decode(component)); + } + return result; + } + + public void sendResponse() { + if (!responseSent) { + responseSent = true; + if (callbackBlock != null) { + callbackBlock.onResponseReady(); + } + } + } + + public void start() { + // Refer to: http://wiki.apache.org/couchdb/Complete_HTTP_API_Reference + + // We're going to map the request into a method call using reflection + // based on the method and path. 
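+ // Illustrative examples of the mapping (the database, document and view
+ // names below are hypothetical, not part of the original source):
+ //   GET  /_all_dbs                         -> do_GET_all_dbs(null, null, null)
+ //   GET  /mydb                             -> do_GET_Database(db, null, null)
+ //   PUT  /mydb/doc1                        -> do_PUT_Document(db, "doc1", null)
+ //   GET  /mydb/doc1/photo.png              -> do_GET_Attachment(db, "doc1", "photo.png")
+ //   GET  /mydb/_design/ddoc/_view/by_name  -> do_GET_DesignDocument(db, "ddoc", "by_name")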
+ // Accumulate the method name into the string 'message': + String method = connection.getRequestMethod(); + if ("HEAD".equals(method)) { + method = "GET"; + } + String message = String.format("do_%s", method); + + // First interpret the components of the request: + List path = splitPath(connection.getURL()); + if (path == null) { + connection.setResponseCode(TDStatus.BAD_REQUEST); + try { + connection.getResponseOutputStream().close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, "Error closing empty output stream"); + } + sendResponse(); + return; + } + + int pathLen = path.size(); + if (pathLen > 0) { + String dbName = path.get(0); + if (dbName.startsWith("_")) { + message += dbName; // special root path, like /_all_dbs + } else { + message += "_Database"; + db = server.getDatabaseNamed(dbName); + if (db == null) { + connection.setResponseCode(TDStatus.BAD_REQUEST); + try { + connection.getResponseOutputStream().close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, + "Error closing empty output stream"); + } + sendResponse(); + return; + } + } + } else { + message += "Root"; + } + + String docID = null; + if (db != null && pathLen > 1) { + message = message.replaceFirst("_Database", "_Document"); + // Make sure database exists, then interpret doc name: + TDStatus status = openDB(); + if (!status.isSuccessful()) { + connection.setResponseCode(status.getCode()); + try { + connection.getResponseOutputStream().close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, "Error closing empty output stream"); + } + sendResponse(); + return; + } + String name = path.get(1); + if (!name.startsWith("_")) { + // Regular document + if (!TDDatabase.isValidDocumentId(name)) { + connection.setResponseCode(TDStatus.BAD_REQUEST); + try { + connection.getResponseOutputStream().close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, + "Error closing empty output stream"); + } + sendResponse(); + return; + } + docID = name; + } else if ("_design".equals(name) || "_local".equals(name)) { + // "_design/____" and "_local/____" are document names + if (pathLen <= 2) { + connection.setResponseCode(TDStatus.NOT_FOUND); + try { + connection.getResponseOutputStream().close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, + "Error closing empty output stream"); + } + sendResponse(); + return; + } + docID = name + "/" + path.get(2); + path.set(1, docID); + path.remove(2); + pathLen--; + } else if (name.startsWith("_design") || name.startsWith("_local")) { + // This is also a document, just with a URL-encoded "/" + docID = name; + } else { + // Special document name like "_all_docs": + message += name; + if (pathLen > 2) { + List subList = path.subList(2, pathLen - 1); + StringBuilder sb = new StringBuilder(); + Iterator iter = subList.iterator(); + while (iter.hasNext()) { + sb.append(iter.next()); + if (iter.hasNext()) { + sb.append("/"); + } + } + docID = sb.toString(); + } + } + } + + String attachmentName = null; + if (docID != null && pathLen > 2) { + message = message.replaceFirst("_Document", "_Attachment"); + // Interpret attachment name: + attachmentName = path.get(2); + if (attachmentName.startsWith("_") && docID.startsWith("_design")) { + // Design-doc attribute like _info or _view + message = message + .replaceFirst("_Attachment", "_DesignDocument"); + docID = docID.substring(8); // strip the "_design/" prefix + attachmentName = pathLen > 3 ? 
path.get(3) : null; + } else { + if (pathLen > 3) { + List subList = path.subList(2, pathLen); + StringBuilder sb = new StringBuilder(); + Iterator iter = subList.iterator(); + while (iter.hasNext()) { + sb.append(iter.next()); + if (iter.hasNext()) { + // sb.append("%2F"); + sb.append("/"); + } + } + attachmentName = sb.toString(); + } + } + } + + // Log.d(TAG, "path: " + path + " message: " + message + " docID: " + + // docID + " attachmentName: " + attachmentName); + + // Send myself a message based on the components: + TDStatus status = new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + try { + Method m = this.getClass().getMethod(message, TDDatabase.class, + String.class, String.class); + status = (TDStatus) m.invoke(this, db, docID, attachmentName); + } catch (NoSuchMethodException msme) { + try { + Method m = this.getClass().getMethod("do_UNKNOWN", + TDDatabase.class, String.class, String.class); + status = (TDStatus) m.invoke(this, db, docID, attachmentName); + } catch (Exception e) { + // default status is internal server error + } + } catch (Exception e) { + // default status is internal server error + Log.e(TDDatabase.TAG, "Exception in TDRouter", e); + } + + // Configure response headers: + if (status.isSuccessful() && connection.getResponseBody() == null + && connection.getHeaderField("Content-Type") == null) { + connection.setResponseBody(new TDBody("{\"ok\":true}".getBytes())); + } + + if (connection.getResponseBody() != null + && connection.getResponseBody().isValidJSON()) { + connection.getResHeader().add("Content-Type", "application/json"); + } + + // Check for a mismatch between the Accept request header and the + // response type: + String accept = connection.getRequestProperty("Accept"); + if (accept != null && !"*/*".equals(accept)) { + String responseType = connection.getBaseContentType(); + if (responseType != null && accept.indexOf(responseType) < 0) { + Log.e(TDDatabase.TAG, String.format( + "Error 406: Can't satisfy request Accept: %s", accept)); + status = new TDStatus(TDStatus.NOT_ACCEPTABLE); + } + } + + connection.getResHeader().add("Server", + String.format("TouchDB %s", getVersionString())); + + // If response is ready (nonzero status), tell my client about it: + if (status.getCode() != 0) { + connection.setResponseCode(status.getCode()); + + if (connection.getResponseBody() != null) { + ByteArrayInputStream bais = new ByteArrayInputStream(connection + .getResponseBody().getJson()); + connection.setResponseInputStream(bais); + } else { + + try { + connection.getResponseOutputStream().close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, "Error closing empty output stream"); + } + } + sendResponse(); + } + } + + public void stop() { + callbackBlock = null; + if (db != null) { + db.deleteObserver(this); + } + } + + public TDStatus do_UNKNOWN(TDDatabase db, String docID, + String attachmentName) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + /*************************************************************************************************/ + /*** TDRouter+Handlers ***/ + /*************************************************************************************************/ + + public void setResponseLocation(URL url) { + String location = url.toExternalForm(); + String query = url.getQuery(); + if (query != null) { + int startOfQuery = location.indexOf(query); + if (startOfQuery > 0) { + location = location.substring(0, startOfQuery); + } + } + connection.getResHeader().add("Location", location); + } + + /** SERVER REQUESTS: **/ + + public TDStatus 
do_GETRoot(TDDatabase _db, String _docID, + String _attachmentName) { + Map info = new HashMap(); + info.put("TouchDB", "Welcome"); + info.put("couchdb", "Welcome"); // for compatibility + info.put("version", getVersionString()); + connection.setResponseBody(new TDBody(info)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_GET_all_dbs(TDDatabase _db, String _docID, + String _attachmentName) { + List dbs = server.allDatabaseNames(); + connection.setResponseBody(new TDBody(dbs)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_GET_session(TDDatabase _db, String _docID, + String _attachmentName) { + // Send back an "Admin Party"-like response + Map session = new HashMap(); + Map userCtx = new HashMap(); + String[] roles = { "_admin" }; + session.put("ok", true); + userCtx.put("name", null); + userCtx.put("roles", roles); + session.put("userCtx", userCtx); + connection.setResponseBody(new TDBody(session)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_POST_replicate(TDDatabase _db, String _docID, + String _attachmentName) { + // Extract the parameters from the JSON request body: + // http://wiki.apache.org/couchdb/Replication + Map body = getBodyAsDictionary(); + if (body == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + String source = (String) body.get("source"); + String target = (String) body.get("target"); + Boolean createTargetBoolean = (Boolean) body.get("create_target"); + boolean createTarget = (createTargetBoolean != null && createTargetBoolean + .booleanValue()); + Boolean continuousBoolean = (Boolean) body.get("continuous"); + boolean continuous = (continuousBoolean != null && continuousBoolean + .booleanValue()); + Boolean cancelBoolean = (Boolean) body.get("cancel"); + boolean cancel = (cancelBoolean != null && cancelBoolean.booleanValue()); + + Map query_params = (Map) body + .get("query_params"); + if (query_params == null) { + // The request body may omit query_params entirely; fall back to an + // empty map so plain replication requests don't throw a NullPointerException. + query_params = new HashMap(); + } + String access_token = (String) query_params.get("access_token"); + + // All query params whose keys start with "header:" are moved into the request headers + Map headers = new HashMap(); + for (String key : query_params.keySet()) { + if (key.startsWith("header:")) { + String value = String.valueOf(query_params.get(key)); + key = key.replaceFirst("header:", ""); + headers.put(key, value); + } + } + + // Remove the "header:" entries from query_params afterwards; removing them
+ // inside the loop above would throw a ConcurrentModificationException. + for (String key : headers.keySet()) { + query_params.remove("header:" + key); + } + + // Map the 'source' and 'target' JSON params to a local database and + // remote URL: + if (source == null || target == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + boolean push = false; + TDDatabase db = server.getExistingDatabaseNamed(source); + String remoteStr = null; + if (db != null) { + remoteStr = target; + push = true; + } else { + remoteStr = source; + if (createTarget && !cancel) { + db = server.getDatabaseNamed(target); + if (!db.open()) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + } else { + db = server.getExistingDatabaseNamed(target); + } + if (db == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + } + + URL remote = null; + try { + remote = new URL(remoteStr); + } catch (MalformedURLException e) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + if (remote == null || !remote.getProtocol().startsWith("http")) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + if (!cancel) { + // Start replication: + TDReplicator repl = db.getReplicator(remote, + server.getDefaultHttpClientFactory(), push, access_token, + headers, continuous, server.getWorkExecutor()); + if (repl == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + String filterName = (String) body.get("filter"); + if (filterName != null) { + repl.setFilterName(filterName); + Map filterParams = (Map) body + .get("query_params"); + if (filterParams != null) { + repl.setFilterParams(filterParams); + } + } + + if (push) { + ((TDPusher) repl).setCreateTarget(createTarget); + } + repl.start(); + Map result = new HashMap(); + result.put("session_id", repl.getSessionID()); + connection.setResponseBody(new TDBody(result)); + } else { + // Cancel replication: + TDReplicator repl = db.getActiveReplicator(remote, push); + if (repl == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + repl.stop(); + } + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_GET_uuids(TDDatabase _db, String _docID, + String _attachmentName) { + int count = Math.min(1000, getIntQuery("count", 1)); + List uuids = new ArrayList(count); + for (int i = 0; i < count; i++) { + uuids.add(TDDatabase.generateDocumentId()); + } + Map result = new HashMap(); + result.put("uuids", uuids); + connection.setResponseBody(new TDBody(result)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_GET_active_tasks(TDDatabase _db, String _docID, + String _attachmentName) { + // http://wiki.apache.org/couchdb/HttpGetActiveTasks + List> activities = new ArrayList>(); + for (TDDatabase db : server.allOpenDatabases()) { + List activeReplicators = db.getActiveReplicators(); + if (activeReplicators != null) { + for (TDReplicator replicator : activeReplicators) { + String source = replicator.getRemote().toExternalForm(); + String target = db.getName(); + if (replicator.isPush()) { + String tmp = source; + source = target; + target = tmp; + } + int processed = replicator.getChangesProcessed(); + int total = replicator.getChangesTotal(); + String status = String.format("Processed %d / %d changes", + processed, total); + int progress = (total > 0) ?
Math.round(100 * processed + / (float) total) : 0; + Map activity = new HashMap(); + activity.put("type", "Replication"); + activity.put("task", replicator.getSessionID()); + activity.put("source", source); + activity.put("target", target); + activity.put("status", status); + activity.put("progress", progress); + activities.add(activity); + } + } + } + connection.setResponseBody(new TDBody(activities)); + return new TDStatus(TDStatus.OK); + } + + /** DATABASE REQUESTS: **/ + + public TDStatus do_GET_Database(TDDatabase _db, String _docID, + String _attachmentName) { + // http://wiki.apache.org/couchdb/HTTP_database_API#Database_Information + TDStatus status = openDB(); + if (!status.isSuccessful()) { + return status; + } + int num_docs = db.getDocumentCount(); + long update_seq = db.getLastSequence(); + Map result = new HashMap(); + result.put("db_name", db.getName()); + result.put("db_uuid", db.publicUUID()); + result.put("doc_count", num_docs); + result.put("update_seq", update_seq); + result.put("disk_size", db.totalDataSize()); + connection.setResponseBody(new TDBody(result)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_PUT_Database(TDDatabase _db, String _docID, + String _attachmentName) { + if (db.exists()) { + return new TDStatus(TDStatus.PRECONDITION_FAILED); + } + if (!db.open()) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + setResponseLocation(connection.getURL()); + return new TDStatus(TDStatus.CREATED); + } + + public TDStatus do_DELETE_Database(TDDatabase _db, String _docID, + String _attachmentName) { + if (getQuery("rev") != null) { + return new TDStatus(TDStatus.BAD_REQUEST); // CouchDB checks for + // this; probably meant + // to be a document + // deletion + } + return server.deleteDatabaseNamed(db.getName()) ? 
new TDStatus( + TDStatus.OK) : new TDStatus(TDStatus.NOT_FOUND); + } + + public TDStatus do_POST_Database(TDDatabase _db, String _docID, + String _attachmentName) { + TDStatus status = openDB(); + if (!status.isSuccessful()) { + return status; + } + return update(db, null, getBodyAsDictionary(), false); + } + + public TDStatus do_GET_Document_all_docs(TDDatabase _db, String _docID, + String _attachmentName) { + TDQueryOptions options = new TDQueryOptions(); + if (!getQueryOptions(options)) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + Map result = db.getAllDocs(options); + if (result == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + connection.setResponseBody(new TDBody(result)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_POST_Document_all_docs(TDDatabase _db, String _docID, + String _attachmentName) { + TDQueryOptions options = new TDQueryOptions(); + if (!getQueryOptions(options)) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + Map body = getBodyAsDictionary(); + if (body == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + Map result = null; + if (body.containsKey("keys") && body.get("keys") instanceof ArrayList) { + ArrayList keys = (ArrayList) body.get("keys"); + result = db.getDocsWithIDs(keys, options); + } else { + result = db.getAllDocs(options); + } + + if (result == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + connection.setResponseBody(new TDBody(result)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_POST_Document_bulk_docs(TDDatabase _db, String _docID, + String _attachmentName) { + Map bodyDict = getBodyAsDictionary(); + if (bodyDict == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + List> docs = (List>) bodyDict + .get("docs"); + + boolean allObj = false; + if (getQuery("all_or_nothing") == null + || (getQuery("all_or_nothing") != null && (new Boolean( + getQuery("all_or_nothing"))))) { + allObj = true; + } + // allowConflict If false, an error status 409 will be returned if the + // insertion would create a conflict, i.e. if the previous revision + // already has a child. 
+ boolean allOrNothing = (allObj && allObj != false); + boolean noNewEdits = true; + if (getQuery("new_edits") == null + || (getQuery("new_edits") != null && (new Boolean( + getQuery("new_edits"))))) { + noNewEdits = false; + } + boolean ok = false; + db.beginTransaction(); + List> results = new ArrayList>(); + try { + for (Map doc : docs) { + String docID = (String) doc.get("_id"); + TDRevision rev = null; + TDStatus status = new TDStatus(TDStatus.BAD_REQUEST); + TDBody docBody = new TDBody(doc); + if (noNewEdits) { + rev = new TDRevision(docBody); + if (rev.getRevId() == null || rev.getDocId() == null + || !rev.getDocId().equals(docID)) { + status = new TDStatus(TDStatus.BAD_REQUEST); + } else { + List history = TDDatabase + .parseCouchDBRevisionHistory(doc); + status = db.forceInsert(rev, history, null); + } + } else { + TDStatus outStatus = new TDStatus(); + rev = update(db, docID, docBody, false, allOrNothing, + outStatus); + status.setCode(outStatus.getCode()); + } + Map result = null; + if (status.isSuccessful()) { + result = new HashMap(); + result.put("ok", true); + result.put("id", docID); + if (rev != null) { + result.put("rev", rev.getRevId()); + } + } else if (allOrNothing) { + return status; // all_or_nothing backs out if there's any + // error + } else if (status.getCode() == TDStatus.FORBIDDEN) { + result = new HashMap(); + result.put("error", "validation failed"); + result.put("id", docID); + } else if (status.getCode() == TDStatus.CONFLICT) { + result = new HashMap(); + result.put("error", "conflict"); + result.put("id", docID); + } else { + return status; // abort the whole thing if something goes + // badly wrong + } + if (result != null) { + results.add(result); + } + } + Log.w(TDDatabase.TAG, String.format( + "%s finished inserting %d revisions in bulk", this, + docs.size())); + ok = true; + } catch (Exception e) { + Log.w(TDDatabase.TAG, String.format( + "%s: Exception inserting revisions in bulk", this), e); + } finally { + db.endTransaction(ok); + } + Log.d(TDDatabase.TAG, "results: " + results.toString()); + connection.setResponseBody(new TDBody(results)); + return new TDStatus(TDStatus.CREATED); + } + + public TDStatus do_POST_Document_revs_diff(TDDatabase _db, String _docID, + String _attachmentName) { + // http://wiki.apache.org/couchdb/HttpPostRevsDiff + // Collect all of the input doc/revision IDs as TDRevisions: + TDRevisionList revs = new TDRevisionList(); + Map body = getBodyAsDictionary(); + if (body == null) { + return new TDStatus(TDStatus.BAD_JSON); + } + for (String docID : body.keySet()) { + List revIDs = (List) body.get(docID); + for (String revID : revIDs) { + TDRevision rev = new TDRevision(docID, revID, false); + revs.add(rev); + } + } + + // Look them up, removing the existing ones from revs: + if (db.findMissingRevisions(revs) != null) { + return new TDStatus(TDStatus.DB_ERROR); + } + + // Return the missing revs in a somewhat different format: + Map diffs = new HashMap(); + for (TDRevision rev : revs) { + String docID = rev.getDocId(); + + List missingRevs = null; + Map idObj = (Map) diffs.get(docID); + if (idObj != null) { + missingRevs = (List) idObj.get("missing"); + } else { + idObj = new HashMap(); + } + + if (missingRevs == null) { + missingRevs = new ArrayList(); + idObj.put("missing", missingRevs); + diffs.put(docID, idObj); + } + missingRevs.add(rev.getRevId()); + } + + // FIXME add support for possible_ancestors + + connection.setResponseBody(new TDBody(diffs)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus 
do_POST_Document_compact(TDDatabase _db, String _docID, + String _attachmentName) { + TDStatus status = _db.compact(); + if (status.getCode() < 300) { + TDStatus outStatus = new TDStatus(); + outStatus.setCode(202); // CouchDB returns 202 'cause it's an async + // operation + return outStatus; + } else { + return status; + } + } + + public TDStatus do_POST_Document_ensure_full_commit(TDDatabase _db, + String _docID, String _attachmentName) { + return new TDStatus(TDStatus.OK); + } + + /** CHANGES: **/ + + public Map changesDictForRevision(TDRevision rev) { + Map changesDict = new HashMap(); + changesDict.put("rev", rev.getRevId()); + + List> changes = new ArrayList>(); + changes.add(changesDict); + + Map result = new HashMap(); + result.put("seq", rev.getSequence()); + result.put("id", rev.getDocId()); + result.put("changes", changes); + if (rev.isDeleted()) { + result.put("deleted", true); + } + if (changesIncludesDocs) { + result.put("doc", rev.getProperties()); + } + return result; + } + + public Map responseBodyForChanges(List changes, + long since) { + List> results = new ArrayList>(); + for (TDRevision rev : changes) { + Map changeDict = changesDictForRevision(rev); + results.add(changeDict); + } + if (changes.size() > 0) { + since = changes.get(changes.size() - 1).getSequence(); + } + Map result = new HashMap(); + result.put("results", results); + result.put("last_seq", since); + return result; + } + + public Map responseBodyForChangesWithConflicts( + List changes, long since) { + // Assumes the changes are grouped by docID so that conflicts will be + // adjacent. + List> entries = new ArrayList>(); + String lastDocID = null; + Map lastEntry = null; + for (TDRevision rev : changes) { + String docID = rev.getDocId(); + if (docID.equals(lastDocID)) { + Map changesDict = new HashMap(); + changesDict.put("rev", rev.getRevId()); + List> inchanges = (List>) lastEntry + .get("changes"); + inchanges.add(changesDict); + } else { + lastEntry = changesDictForRevision(rev); + entries.add(lastEntry); + lastDocID = docID; + } + } + // After collecting revisions, sort by sequence: + Collections.sort(entries, new Comparator>() { + public int compare(Map e1, Map e2) { + return TDMisc.TDSequenceCompare((Long) e1.get("seq"), + (Long) e2.get("seq")); + } + }); + + Long lastSeq = (Long) entries.get(entries.size() - 1).get("seq"); + if (lastSeq == null) { + lastSeq = since; + } + + Map result = new HashMap(); + result.put("results", entries); + result.put("last_seq", lastSeq); + return result; + } + + public void sendContinuousChange(TDRevision rev) { + Map changeDict = changesDictForRevision(rev); + try { + String jsonString = TDServer.getObjectMapper().writeValueAsString( + changeDict); + if (callbackBlock != null) { + byte[] json = (jsonString + "\n").getBytes(); + OutputStream os = connection.getResponseOutputStream(); + try { + os.write(json); + os.flush(); + } catch (Exception e) { + Log.e(TDDatabase.TAG, + "IOException writing to internal streams", e); + } + } + } catch (Exception e) { + Log.w("Unable to serialize change to JSON", e); + } + } + + @Override + public void update(Observable observable, Object changeObject) { + if (observable == db) { + // make sure we're listening to the right events + Map changeNotification = (Map) changeObject; + + TDRevision rev = (TDRevision) changeNotification.get("rev"); + + if (changesFilter != null && !changesFilter.filter(rev)) { + return; + } + + if (longpoll) { + Log.w(TDDatabase.TAG, "TDRouter: Sending longpoll response"); + sendResponse(); + List revs = 
new ArrayList(); + revs.add(rev); + Map body = responseBodyForChanges(revs, 0); + if (callbackBlock != null) { + byte[] data = null; + try { + data = TDServer.getObjectMapper().writeValueAsBytes( + body); + } catch (Exception e) { + Log.w(TDDatabase.TAG, "Error serializing JSON", e); + } + OutputStream os = connection.getResponseOutputStream(); + try { + os.write(data); + os.close(); + } catch (IOException e) { + Log.e(TDDatabase.TAG, + "IOException writing to internal streams", e); + } + } + } else { + Log.w(TDDatabase.TAG, + "TDRouter: Sending continuous change chunk"); + sendContinuousChange(rev); + } + + } + + } + + public TDStatus do_GET_Document_changes(TDDatabase _db, String docID, + String _attachmentName) { + // http://wiki.apache.org/couchdb/HTTP_database_API#Changes + TDChangesOptions options = new TDChangesOptions(); + changesIncludesDocs = getBooleanQuery("include_docs"); + options.setIncludeDocs(changesIncludesDocs); + String style = getQuery("style"); + if (style != null && style.equals("all_docs")) { + options.setIncludeConflicts(true); + } + options.setContentOptions(getContentOptions()); + options.setSortBySequence(!options.isIncludeConflicts()); + options.setLimit(getIntQuery("limit", options.getLimit())); + + int since = getIntQuery("since", 0); + + String filterName = getQuery("filter"); + if (filterName != null) { + changesFilter = db.getFilterNamed(filterName); + if (changesFilter == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + } + + TDRevisionList changes = db.changesSince(since, options, changesFilter); + + if (changes == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + + String feed = getQuery("feed"); + longpoll = "longpoll".equals(feed); + boolean continuous = !longpoll && "continuous".equals(feed); + + if (continuous || (longpoll && changes.size() == 0)) { + connection.setChunked(true); + connection.setResponseCode(TDStatus.OK); + sendResponse(); + if (continuous) { + for (TDRevision rev : changes) { + sendContinuousChange(rev); + } + } + db.addObserver(this); + // Don't close connection; more data to come + return new TDStatus(0); + } else { + if (options.isIncludeConflicts()) { + connection.setResponseBody(new TDBody( + responseBodyForChangesWithConflicts(changes, since))); + } else { + connection.setResponseBody(new TDBody(responseBodyForChanges( + changes, since))); + } + return new TDStatus(TDStatus.OK); + } + } + + /** DOCUMENT REQUESTS: **/ + + public String getRevIDFromIfMatchHeader() { + String ifMatch = connection.getRequestProperty("If-Match"); + if (ifMatch == null) { + return null; + } + // Value of If-Match is an ETag, so have to trim the quotes around it: + if (ifMatch.length() > 2 && ifMatch.startsWith("\"") + && ifMatch.endsWith("\"")) { + return ifMatch.substring(1, ifMatch.length() - 1); + } else { + return null; + } + } + + public String setResponseEtag(TDRevision rev) { + String eTag = String.format("\"%s\"", rev.getRevId()); + connection.getResHeader().add("Etag", eTag); + return eTag; + } + + public TDStatus do_GET_Document(TDDatabase _db, String docID, + String _attachmentName) { + // http://wiki.apache.org/couchdb/HTTP_Document_API#GET + boolean isLocalDoc = docID.startsWith("_local"); + EnumSet options = getContentOptions(); + String openRevsParam = getQuery("open_revs"); + if (openRevsParam == null || isLocalDoc) { + // Regular GET: + String revID = getQuery("rev"); // often null + TDRevision rev = null; + if (isLocalDoc) { + rev = db.getLocalDocument(docID, revID); + } else { + rev =
db.getDocumentWithIDAndRev(docID, revID, options); + // Handle ?atts_since query by stubbing out older attachments: + // ?atts_since parameter - value is a (URL-encoded) JSON array + // of one or more revision IDs. + // The response will include the content of only those + // attachments that changed since the given revision(s). + // (You can ask for this either in the default JSON or as + // multipart/related, as previously described.) + List attsSince = (List) getJSONQuery("atts_since"); + if (attsSince != null) { + String ancestorId = db.findCommonAncestorOf(rev, attsSince); + if (ancestorId != null) { + int generation = TDRevision + .generationFromRevID(ancestorId); + db.stubOutAttachmentsIn(rev, generation + 1); + } + } + } + if (rev == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + if (cacheWithEtag(rev.getRevId())) { + return new TDStatus(TDStatus.NOT_MODIFIED); // set ETag and + // check conditional + // GET + } + + connection.setResponseBody(rev.getBody()); + } else { + List> result = null; + if (openRevsParam.equals("all")) { + // Get all conflicting revisions: + TDRevisionList allRevs = db.getAllRevisionsOfDocumentID(docID, + true); + result = new ArrayList>(allRevs.size()); + for (TDRevision rev : allRevs) { + TDStatus status = db.loadRevisionBody(rev, options); + if (status.isSuccessful()) { + Map dict = new HashMap(); + dict.put("ok", rev.getProperties()); + result.add(dict); + } else if (status.getCode() != TDStatus.INTERNAL_SERVER_ERROR) { + Map dict = new HashMap(); + dict.put("missing", rev.getRevId()); + result.add(dict); + } else { + return status; // internal error getting revision + } + } + } else { + // ?open_revs=[...] returns an array of revisions of the + // document: + List openRevs = (List) getJSONQuery("open_revs"); + if (openRevs == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + result = new ArrayList>(openRevs.size()); + for (String revID : openRevs) { + TDRevision rev = db.getDocumentWithIDAndRev(docID, revID, + options); + if (rev != null) { + Map dict = new HashMap(); + dict.put("ok", rev.getProperties()); + result.add(dict); + } else { + Map dict = new HashMap(); + dict.put("missing", revID); + result.add(dict); + } + } + } + String acceptMultipart = getMultipartRequestType(); + if (acceptMultipart != null) { + // FIXME figure out support for multipart + throw new UnsupportedOperationException(); + } else { + connection.setResponseBody(new TDBody(result)); + } + } + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_GET_Attachment(TDDatabase _db, String docID, + String _attachmentName) { + // http://wiki.apache.org/couchdb/HTTP_Document_API#GET + EnumSet options = getContentOptions(); + options.add(TDContentOptions.TDNoBody); + String revID = getQuery("rev"); // often null + TDRevision rev = db.getDocumentWithIDAndRev(docID, revID, options); + if (rev == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + if (cacheWithEtag(rev.getRevId())) { + return new TDStatus(TDStatus.NOT_MODIFIED); // set ETag and check + // conditional GET + } + + String type = null; + TDStatus status = new TDStatus(); + String acceptEncoding = connection + .getRequestProperty("Accept-Encoding"); + TDAttachment contents = db.getAttachmentForSequence(rev.getSequence(), + _attachmentName, status); + + if (contents == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + type = contents.getContentType(); + if (type != null) { + connection.getResHeader().add("Content-Type", type); + } + if (acceptEncoding != null && acceptEncoding.equals("gzip")) 
{ + connection.getResHeader().add("Content-Encoding", acceptEncoding); + } + + connection.setResponseInputStream(contents.getContentStream()); + return new TDStatus(TDStatus.OK); + } + + /** + * NOTE this departs from the iOS version, returning revision, passing + * status back by reference + */ + public TDRevision update(TDDatabase _db, String docID, TDBody body, + boolean deleting, boolean allowConflict, TDStatus outStatus) { + boolean isLocalDoc = docID != null && docID.startsWith(("_local")); + String prevRevID = null; + + if (!deleting) { + Boolean deletingBoolean = (Boolean) body + .getPropertyForKey("deleted"); + deleting = (deletingBoolean != null && deletingBoolean + .booleanValue()); + if (docID == null) { + if (isLocalDoc) { + outStatus.setCode(TDStatus.METHOD_NOT_ALLOWED); + return null; + } + // POST's doc ID may come from the _id field of the JSON body, + // else generate a random one. + docID = (String) body.getPropertyForKey("_id"); + if (docID == null) { + if (deleting) { + outStatus.setCode(TDStatus.BAD_REQUEST); + return null; + } + docID = TDDatabase.generateDocumentId(); + } + } + // PUT's revision ID comes from the JSON body. + prevRevID = (String) body.getPropertyForKey("_rev"); + } else { + // DELETE's revision ID comes from the ?rev= query param + prevRevID = getQuery("rev"); + } + + // A backup source of revision ID is an If-Match header: + if (prevRevID == null) { + prevRevID = getRevIDFromIfMatchHeader(); + } + + TDRevision rev = new TDRevision(docID, null, deleting); + rev.setBody(body); + + TDRevision result = null; + TDStatus tmpStatus = new TDStatus(); + if (isLocalDoc) { + result = _db.putLocalRevision(rev, prevRevID, tmpStatus); + } else { + result = _db.putRevision(rev, prevRevID, allowConflict, tmpStatus); + } + outStatus.setCode(tmpStatus.getCode()); + return result; + } + + public TDStatus update(TDDatabase _db, String docID, + Map bodyDict, boolean deleting) { + TDBody body = new TDBody(bodyDict); + TDStatus status = new TDStatus(); + TDRevision rev = update(_db, docID, body, deleting, false, status); + if (status.isSuccessful()) { + cacheWithEtag(rev.getRevId()); // set ETag + if (!deleting) { + URL url = connection.getURL(); + String urlString = url.toExternalForm(); + if (docID != null) { + urlString += "/" + rev.getDocId(); + try { + url = new URL(urlString); + } catch (MalformedURLException e) { + Log.w("Malformed URL", e); + } + } + setResponseLocation(url); + } + Map result = new HashMap(); + result.put("ok", true); + result.put("id", rev.getDocId()); + result.put("rev", rev.getRevId()); + connection.setResponseBody(new TDBody(result)); + } + return status; + } + + public TDStatus do_PUT_Document(TDDatabase _db, String docID, + String _attachmentName) { + Map bodyDict = getBodyAsDictionary(); + if (bodyDict == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + + if (getQuery("new_edits") == null + || (getQuery("new_edits") != null && (new Boolean( + getQuery("new_edits"))))) { + // Regular PUT + return update(_db, docID, bodyDict, false); + } else { + // PUT with new_edits=false -- forcible insertion of existing + // revision: + TDBody body = new TDBody(bodyDict); + TDRevision rev = new TDRevision(body); + if (rev.getRevId() == null || rev.getDocId() == null + || !rev.getDocId().equals(docID)) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + List history = TDDatabase.parseCouchDBRevisionHistory(body + .getProperties()); + return db.forceInsert(rev, history, null); + } + } + + public TDStatus do_DELETE_Document(TDDatabase _db, String 
docID, + String _attachmentName) { + return update(_db, docID, null, true); + } + + public TDStatus updateAttachment(String attachment, String docID, + InputStream contentStream) { + TDStatus status = new TDStatus(); + String revID = getQuery("rev"); + if (revID == null) { + revID = getRevIDFromIfMatchHeader(); + } + TDRevision rev = db.updateAttachment(attachment, contentStream, + connection.getRequestProperty("content-type"), docID, revID, + status); + if (status.isSuccessful()) { + Map resultDict = new HashMap(); + resultDict.put("ok", true); + resultDict.put("id", rev.getDocId()); + resultDict.put("rev", rev.getRevId()); + connection.setResponseBody(new TDBody(resultDict)); + cacheWithEtag(rev.getRevId()); + if (contentStream != null) { + setResponseLocation(connection.getURL()); + } + } + return status; + } + + public TDStatus do_PUT_Attachment(TDDatabase _db, String docID, + String _attachmentName) { + return updateAttachment(_attachmentName, docID, + connection.getRequestInputStream()); + } + + public TDStatus do_DELETE_Attachment(TDDatabase _db, String docID, + String _attachmentName) { + return updateAttachment(_attachmentName, docID, null); + } + + /** VIEW QUERIES: **/ + + public TDView compileView(String viewName, Map viewProps) { + String language = (String) viewProps.get("language"); + if (language == null) { + language = "javascript"; + } + String mapSource = (String) viewProps.get("map"); + if (mapSource == null) { + return null; + } + TDViewMapBlock mapBlock = TDView.getCompiler().compileMapFunction( + mapSource, language); + if (mapBlock == null) { + Log.w(TDDatabase.TAG, String + .format("View %s has unknown map function: %s", viewName, + mapSource)); + return null; + } + String reduceSource = (String) viewProps.get("reduce"); + TDViewReduceBlock reduceBlock = null; + if (reduceSource != null) { + reduceBlock = TDView.getCompiler().compileReduceFunction( + reduceSource, language); + if (reduceBlock == null) { + Log.w(TDDatabase.TAG, String.format( + "View %s has unknown reduce function: %s", viewName, + reduceSource)); + return null; + } + } + + TDView view = db.getViewNamed(viewName); + view.setMapReduceBlocks(mapBlock, reduceBlock, "1"); + String collation = (String) viewProps.get("collation"); + if ("raw".equals(collation)) { + view.setCollation(TDViewCollation.TDViewCollationRaw); + } + return view; + } + + public TDStatus queryDesignDoc(String designDoc, String viewName, + List keys) { + String tdViewName = String.format("%s/%s", designDoc, viewName); + TDView view = db.getExistingViewNamed(tdViewName); + if (view == null || view.getMapBlock() == null) { + // No TouchDB view is defined, or it hasn't had a map block + // assigned; + // see if there's a CouchDB view definition we can compile: + TDRevision rev = db.getDocumentWithIDAndRev( + String.format("_design/%s", designDoc), null, + EnumSet.noneOf(TDContentOptions.class)); + if (rev == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + Map views = (Map) rev + .getProperties().get("views"); + Map viewProps = (Map) views + .get(viewName); + if (viewProps == null) { + return new TDStatus(TDStatus.NOT_FOUND); + } + // If there is a CouchDB view, see if it can be compiled from + // source: + view = compileView(tdViewName, viewProps); + if (view == null) { + return new TDStatus(TDStatus.INTERNAL_SERVER_ERROR); + } + } + + TDQueryOptions options = new TDQueryOptions(); + + // if the view contains a reduce block, it should default to reduce=true + if (view.getReduceBlock() != null) { + options.setReduce(true); + } + 
+ if (!getQueryOptions(options)) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + if (keys != null) { + options.setKeys(keys); + } + + TDStatus status = view.updateIndex(); + if (!status.isSuccessful()) { + return status; + } + + long lastSequenceIndexed = view.getLastSequenceIndexed(); + + // Check for conditional GET and set response Etag header: + if (keys == null) { + long eTag = options.isIncludeDocs() ? db.getLastSequence() + : lastSequenceIndexed; + if (cacheWithEtag(String.format("%d", eTag))) { + return new TDStatus(TDStatus.NOT_MODIFIED); + } + } + + List> rows = view.queryWithOptions(options, status); + if (rows == null) { + return status; + } + + Map responseBody = new HashMap(); + responseBody.put("rows", rows); + responseBody.put("total_rows", rows.size()); + responseBody.put("offset", options.getSkip()); + if (options.isUpdateSeq()) { + responseBody.put("update_seq", lastSequenceIndexed); + } + connection.setResponseBody(new TDBody(responseBody)); + return new TDStatus(TDStatus.OK); + } + + public TDStatus do_GET_DesignDocument(TDDatabase _db, String designDocID, + String viewName) { + return queryDesignDoc(designDocID, viewName, null); + } + + public TDStatus do_POST_DesignDocument(TDDatabase _db, String designDocID, + String viewName) { + Map bodyDict = getBodyAsDictionary(); + if (bodyDict == null) { + return new TDStatus(TDStatus.BAD_REQUEST); + } + List keys = (List) bodyDict.get("keys"); + return queryDesignDoc(designDocID, viewName, keys); + } + + @Override + public String toString() { + String url = "Unknown"; + if (connection != null && connection.getURL() != null) { + url = connection.getURL().toExternalForm(); + } + return String.format("TDRouter [%s]", url); + } } diff --git a/TouchDB-Android/src/com/couchbase/touchdb/support/TDBatcher.java b/TouchDB-Android/src/com/couchbase/touchdb/support/TDBatcher.java index 175f99e..3ea078c 100644 --- a/TouchDB-Android/src/com/couchbase/touchdb/support/TDBatcher.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/support/TDBatcher.java @@ -2,17 +2,13 @@ import java.util.ArrayList; import java.util.List; -import java.util.concurrent.Future; -import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; -import android.os.Handler; import android.util.Log; import com.couchbase.touchdb.TDDatabase; -import com.couchbase.touchdb.TDRevision; /** * Utility that queues up objects until the queue fills up or a time interval elapses, @@ -48,7 +44,7 @@ public TDBatcher(ScheduledExecutorService workExecutor, int capacity, int delay, this.processor = processor; } - public void processNow() { + public void processNow() { List toProcess = null; synchronized(this) { if(inbox == null || inbox.size() == 0) { diff --git a/TouchDB-Android/src/com/couchbase/touchdb/support/TDRemoteRequest.java b/TouchDB-Android/src/com/couchbase/touchdb/support/TDRemoteRequest.java index 238bdbb..5baa6b8 100644 --- a/TouchDB-Android/src/com/couchbase/touchdb/support/TDRemoteRequest.java +++ b/TouchDB-Android/src/com/couchbase/touchdb/support/TDRemoteRequest.java @@ -3,6 +3,7 @@ import java.io.IOException; import java.io.InputStream; import java.net.URL; +import java.util.Map; import java.util.concurrent.ScheduledExecutorService; import org.apache.http.HttpEntity; @@ -33,7 +34,6 @@ import org.apache.http.protocol.ExecutionContext; import org.apache.http.protocol.HttpContext; -import android.os.Handler; import android.util.Log; 
import com.couchbase.touchdb.TDDatabase; @@ -41,151 +41,161 @@ public class TDRemoteRequest implements Runnable { - private ScheduledExecutorService workExecutor; - private final HttpClientFactory clientFactory; - private String method; - private URL url; - private Object body; - private TDRemoteRequestCompletionBlock onCompletion; - - public TDRemoteRequest(ScheduledExecutorService workExecutor, - HttpClientFactory clientFactory, String method, URL url, - Object body, TDRemoteRequestCompletionBlock onCompletion) { - this.clientFactory = clientFactory; - this.method = method; - this.url = url; - this.body = body; - this.onCompletion = onCompletion; - this.workExecutor = workExecutor; - } - - @Override - public void run() { - HttpClient httpClient = clientFactory.getHttpClient(); - ClientConnectionManager manager = httpClient.getConnectionManager(); - - HttpUriRequest request = null; - if (method.equalsIgnoreCase("GET")) { - request = new HttpGet(url.toExternalForm()); - } else if (method.equalsIgnoreCase("PUT")) { - request = new HttpPut(url.toExternalForm()); - } else if (method.equalsIgnoreCase("POST")) { - request = new HttpPost(url.toExternalForm()); - } - - // if the URL contains user info AND if this a DefaultHttpClient - // then preemptively set the auth credentials - if (url.getUserInfo() != null) { - if (url.getUserInfo().contains(":")) { - String[] userInfoSplit = url.getUserInfo().split(":"); - final Credentials creds = new UsernamePasswordCredentials( - userInfoSplit[0], userInfoSplit[1]); - if (httpClient instanceof DefaultHttpClient) { - DefaultHttpClient dhc = (DefaultHttpClient) httpClient; - - HttpRequestInterceptor preemptiveAuth = new HttpRequestInterceptor() { - - @Override - public void process(HttpRequest request, - HttpContext context) throws HttpException, - IOException { - AuthState authState = (AuthState) context - .getAttribute(ClientContext.TARGET_AUTH_STATE); - CredentialsProvider credsProvider = (CredentialsProvider) context - .getAttribute(ClientContext.CREDS_PROVIDER); - HttpHost targetHost = (HttpHost) context - .getAttribute(ExecutionContext.HTTP_TARGET_HOST); - - if (authState.getAuthScheme() == null) { - AuthScope authScope = new AuthScope( - targetHost.getHostName(), - targetHost.getPort()); - authState.setAuthScheme(new BasicScheme()); - authState.setCredentials(creds); - } - } - }; - - dhc.addRequestInterceptor(preemptiveAuth, 0); - } - } else { - Log.w(TDDatabase.TAG, - "Unable to parse user info, not setting credentials"); - } - } - - request.addHeader("Accept", "application/json"); - - // set body if appropriate - if (body != null && request instanceof HttpEntityEnclosingRequestBase) { - byte[] bodyBytes = null; - try { - bodyBytes = TDServer.getObjectMapper().writeValueAsBytes(body); - } catch (Exception e) { - Log.e(TDDatabase.TAG, "Error serializing body of request", e); - } - ByteArrayEntity entity = new ByteArrayEntity(bodyBytes); - entity.setContentType("application/json"); - ((HttpEntityEnclosingRequestBase) request).setEntity(entity); - } - - Object fullBody = null; - Throwable error = null; - try { - HttpResponse response = httpClient.execute(request); - StatusLine status = response.getStatusLine(); - if (status.getStatusCode() >= 300) { - Log.e(TDDatabase.TAG, - "Got error " + Integer.toString(status.getStatusCode())); - Log.e(TDDatabase.TAG, "Request was for: " + request.toString()); - Log.e(TDDatabase.TAG, - "Status reason: " + status.getReasonPhrase()); - error = new HttpResponseException(status.getStatusCode(), - 
status.getReasonPhrase()); - } else { - HttpEntity temp = response.getEntity(); - if (temp != null) { - try { - InputStream stream = temp.getContent(); - fullBody = TDServer.getObjectMapper().readValue(stream, - Object.class); - } finally { - try { - temp.consumeContent(); - } catch (IOException e) { - } - } - } - } - } catch (ClientProtocolException e) { - Log.e(TDDatabase.TAG, "client protocol exception", e); - error = e; - } catch (IOException e) { - Log.e(TDDatabase.TAG, "io exception", e); - error = e; - } - respondWithResult(fullBody, error); - } - - public void respondWithResult(final Object result, final Throwable error) { - if (workExecutor != null) { - workExecutor.submit(new Runnable() { - - @Override - public void run() { - try { - onCompletion.onCompletion(result, error); - } catch (Exception e) { - // don't let this crash the thread - Log.e(TDDatabase.TAG, - "TDRemoteRequestCompletionBlock throw Exception", - e); - } - } - }); - } else { - Log.e(TDDatabase.TAG, "work executor was null!!!"); - } - } + private ScheduledExecutorService workExecutor; + private final HttpClientFactory clientFactory; + private String method; + private URL url; + private Map headers; + private Object body; + private TDRemoteRequestCompletionBlock onCompletion; + + public TDRemoteRequest(ScheduledExecutorService workExecutor, + HttpClientFactory clientFactory, String method, URL url, + Map headers, Object body, + TDRemoteRequestCompletionBlock onCompletion) { + this.clientFactory = clientFactory; + this.method = method; + this.url = url; + this.headers = headers; + this.body = body; + this.onCompletion = onCompletion; + this.workExecutor = workExecutor; + } + + @Override + public void run() { + HttpClient httpClient = clientFactory.getHttpClient(); + ClientConnectionManager manager = httpClient.getConnectionManager(); + + HttpUriRequest request = null; + if (method.equalsIgnoreCase("GET")) { + request = new HttpGet(url.toExternalForm()); + } else if (method.equalsIgnoreCase("PUT")) { + request = new HttpPut(url.toExternalForm()); + } else if (method.equalsIgnoreCase("POST")) { + request = new HttpPost(url.toExternalForm()); + } + + // add headers + if(headers != null && headers.size() > 0) { + for(String key : headers.keySet()) { + request.addHeader(key, headers.get(key)); + } + } + + // if the URL contains user info AND if this a DefaultHttpClient + // then preemptively set the auth credentials + if (url.getUserInfo() != null) { + if (url.getUserInfo().contains(":")) { + String[] userInfoSplit = url.getUserInfo().split(":"); + final Credentials creds = new UsernamePasswordCredentials( + userInfoSplit[0], userInfoSplit[1]); + if (httpClient instanceof DefaultHttpClient) { + DefaultHttpClient dhc = (DefaultHttpClient) httpClient; + + HttpRequestInterceptor preemptiveAuth = new HttpRequestInterceptor() { + + @Override + public void process(HttpRequest request, + HttpContext context) throws HttpException, + IOException { + AuthState authState = (AuthState) context + .getAttribute(ClientContext.TARGET_AUTH_STATE); + CredentialsProvider credsProvider = (CredentialsProvider) context + .getAttribute(ClientContext.CREDS_PROVIDER); + HttpHost targetHost = (HttpHost) context + .getAttribute(ExecutionContext.HTTP_TARGET_HOST); + + if (authState.getAuthScheme() == null) { + AuthScope authScope = new AuthScope( + targetHost.getHostName(), + targetHost.getPort()); + authState.setAuthScheme(new BasicScheme()); + authState.setCredentials(creds); + } + } + }; + + dhc.addRequestInterceptor(preemptiveAuth, 0); + } + } 
else { + Log.w(TDDatabase.TAG, + "Unable to parse user info, not setting credentials"); + } + } + + request.addHeader("Accept", "application/json"); + + // set body if appropriate + if (body != null && request instanceof HttpEntityEnclosingRequestBase) { + byte[] bodyBytes = null; + try { + bodyBytes = TDServer.getObjectMapper().writeValueAsBytes(body); + } catch (Exception e) { + Log.e(TDDatabase.TAG, "Error serializing body of request", e); + } + ByteArrayEntity entity = new ByteArrayEntity(bodyBytes); + entity.setContentType("application/json"); + ((HttpEntityEnclosingRequestBase) request).setEntity(entity); + } + + Object fullBody = null; + Throwable error = null; + try { + HttpResponse response = httpClient.execute(request); + StatusLine status = response.getStatusLine(); + if (status.getStatusCode() >= 300) { + Log.e(TDDatabase.TAG, + "Got error " + Integer.toString(status.getStatusCode())); + Log.e(TDDatabase.TAG, "Request was for: " + request.toString()); + Log.e(TDDatabase.TAG, + "Status reason: " + status.getReasonPhrase()); + error = new HttpResponseException(status.getStatusCode(), + status.getReasonPhrase()); + } else { + HttpEntity temp = response.getEntity(); + if (temp != null) { + try { + InputStream stream = temp.getContent(); + fullBody = TDServer.getObjectMapper().readValue(stream, + Object.class); + } finally { + try { + temp.consumeContent(); + } catch (IOException e) { + } + } + } + } + } catch (ClientProtocolException e) { + Log.e(TDDatabase.TAG, "client protocol exception", e); + error = e; + } catch (IOException e) { + Log.e(TDDatabase.TAG, "io exception", e); + error = e; + } + respondWithResult(fullBody, error); + } + + public void respondWithResult(final Object result, final Throwable error) { + if (workExecutor != null) { + workExecutor.submit(new Runnable() { + + @Override + public void run() { + try { + onCompletion.onCompletion(result, error); + } catch (Exception e) { + // don't let this crash the thread + Log.e(TDDatabase.TAG, + "TDRemoteRequestCompletionBlock threw Exception", + e); + } + } + }); + } else { + Log.e(TDDatabase.TAG, "work executor was null!!!"); + } + } }
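
Reviewer note on the new headers support: the "header:<Name>" query parameters collected by the router's _replicate handler are passed down through TDDatabase#getReplicator() and end up in the headers map that the TDRemoteRequest constructor now accepts, where run() copies each entry onto the outgoing request via addHeader(). Below is a minimal usage sketch, not part of the patch: the class and method names, the example URL, and the X-Custom-Header value are illustrative placeholders; it assumes the headers map is Map<String, String> (consistent with the addHeader() call in run()), that TDRemoteRequestCompletionBlock is a single-method callback onCompletion(Object, Throwable) as suggested by respondWithResult(), and that the snippet lives in the same package as TDRemoteRequest so no TouchDB imports are needed.

import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

public class RemoteRequestHeadersExample {

    // "factory" would typically be obtained elsewhere, e.g. from
    // TDServer#getDefaultHttpClientFactory() as the router does.
    public static void fetchChanges(HttpClientFactory factory) throws Exception {
        // Extra headers to attach to the outgoing request; values are placeholders.
        Map<String, String> extraHeaders = new HashMap<String, String>();
        extraHeaders.put("X-Custom-Header", "some-value");

        // Executor that runs the request and later delivers its completion callback.
        ScheduledExecutorService workExecutor =
                Executors.newSingleThreadScheduledExecutor();

        TDRemoteRequest request = new TDRemoteRequest(
                workExecutor,
                factory,
                "GET",
                new URL("http://example.com/exampledb/_changes"),
                extraHeaders,           // forwarded via request.addHeader(...) in run()
                null,                   // no JSON body for a GET
                new TDRemoteRequestCompletionBlock() {
                    public void onCompletion(Object result, Throwable error) {
                        // "result" is the parsed JSON body on success;
                        // "error" is non-null if the request failed.
                    }
                });

        // TDRemoteRequest implements Runnable, so it can simply be submitted.
        workExecutor.submit(request);
    }
}

Note that respondWithResult() hands the completion block back to the work executor rather than invoking it synchronously, so the callback above runs as a separate task on that executor.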