diff --git a/.gitignore b/.gitignore
index 7aec85f0..74e04886 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,3 @@
-
# sbt specific
target/
# local sbt caching
diff --git a/.travis.yml b/.travis.yml
index ecd57b60..97668ed4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,9 +1,8 @@
language: scala
scala:
-- 2.11.5
-- 2.10.4
+- 2.11.8
+- 2.12.1
jdk:
-- oraclejdk7
- oraclejdk8
sudo: false
cache:
diff --git a/README.md b/README.md
index 85ef10e3..e3c2a38c 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,6 @@
-[](https://travis-ci.org/dwhjames/datomisca) [ ](https://bintray.com/dwhjames/maven/datomisca/_latestVersion)
+[](https://travis-ci.org/dwhjames/datomisca) [  ](https://bintray.com/dwhjames/maven/datomisca/_latestVersion)
+
+### NOTE: This is a fork of the official, seemingly abandoned repo modified to support scala 2.12. Most of this work was done by https://github.com/flyingwalrusllc/datomisca but this finishes that and publishes it publicly to bintray.
# [Datomisca](https://dwhjames.github.io/datomisca), embrace Datomic the Scala way
diff --git a/build.sbt b/build.sbt
index 06c12e66..1f1f302e 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,103 +1,146 @@
+import sbtunidoc.Plugin.UnidocKeys._
+import ReleaseTransformations._
+import com.typesafe.sbt.SbtGhPages.GhPagesKeys._
-organization in ThisBuild := "com.github.dwhjames"
-
+organization in ThisBuild := "llc.flyingwalrus"
licenses in ThisBuild += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))
-
-version in ThisBuild := "0.7.0"
-
-
-scalaVersion in ThisBuild := "2.11.6"
-
-crossScalaVersions in ThisBuild := Seq("2.10.4", "2.11.6")
-
-scalacOptions in ThisBuild ++= Seq(
- "-deprecation",
- "-encoding", "UTF-8",
- "-feature",
- "-unchecked",
- "-Xfatal-warnings",
- "-Xfuture",
- "-Xlint",
- "-Yno-adapted-args",
- "-Ywarn-dead-code",
- "-Ywarn-numeric-widen",
- "-Ywarn-value-discard"
- )
-
-scalacOptions in ThisBuild ++= (
- if (scalaVersion.value.startsWith("2.10")) Nil
- else List("-Ywarn-unused-import")
- )
-
+scalaVersion in ThisBuild := "2.12.2"
+crossScalaVersions in ThisBuild := Seq("2.11.8", "2.12.2")
+
+val compilerOptions = Seq(
+ "-deprecation",
+ "-encoding", "UTF-8",
+ "-feature",
+ "-unchecked",
+ "-Xfatal-warnings",
+ "-Xfuture",
+ "-Xlint",
+ "-Yno-adapted-args",
+ "-Ywarn-dead-code",
+ "-Ywarn-numeric-widen",
+ "-Ywarn-value-discard"
+)
resolvers in ThisBuild ++= Seq(
- Resolver.sonatypeRepo("releases"),
- Resolver.typesafeRepo("releases"),
- "clojars" at "https://clojars.org/repo",
- "couchbase" at "http://files.couchbase.com/maven2"
- )
-
-
-shellPrompt in ThisBuild := CustomShellPrompt.customPrompt
-
-
-// configure publishing to bintray
-bintray.Plugin.bintraySettings
-
+ "clojars" at "https://clojars.org/repo"
+)
lazy val datomisca = project.
in(file(".")).
aggregate(macros, core, tests, integrationTests)
-// needed for aggregated build
-MacroSettings.settings
-
-libraryDependencies += Dependencies.Compile.datomic
-
-// disable some aggregation tasks for subprojects
-aggregate in doc := false
-
-aggregate in Keys.`package` := false
-
-aggregate in packageBin := false
-
-aggregate in packageDoc := false
-
-aggregate in packageSrc := false
-
-aggregate in publish := false
-
-aggregate in publishLocal := false
-
-aggregate in PgpKeys.publishSigned := false
-
-aggregate in PgpKeys.publishLocalSigned := false
-
-
-lazy val macros = project in file("macros")
-
-// map macros project classes and sources into root project
-mappings in (Compile, packageBin) <++= mappings in (macros, Compile, packageBin)
-
-mappings in (Compile, packageSrc) <++= mappings in (macros, Compile, packageSrc)
+lazy val tests = project.in(file("tests")).
+ settings(noPublishSettings).
+ settings(
+ name := "datomisca-tests",
+ libraryDependencies ++= Seq(
+ datomic,
+ specs2
+ ),
+ fork in Test := true,
+ publishArtifact := false
+ ).
+ dependsOn(macros, core)
+lazy val integrationTests = project.in(file("integration")).
+ settings(noPublishSettings).
+ settings(Defaults.itSettings).
+ settings(
+    name := "datomisca-integration-tests",
+ libraryDependencies ++= Seq(
+ datomic,
+ scalatest,
+ xmlModule
+ ),
+ fork in IntegrationTest := true,
+ publishArtifact := false
+ ).
+ dependsOn(macros, core).
+ configs(IntegrationTest)
-lazy val core = project.
- in(file("core")).
+lazy val core = project.in(file("core")).
+ settings(noPublishSettings).
+ settings(
+ name := "datomisca-core",
+ libraryDependencies += datomic,
+ (sourceGenerators in Compile) += ((sourceManaged in Compile) map Boilerplate.genCore).taskValue
+ ).
dependsOn(macros)
-// map core project classes and sources into root project
-mappings in (Compile, packageBin) <++= mappings in (core, Compile, packageBin)
-
-mappings in (Compile, packageSrc) <++= mappings in (core, Compile, packageSrc)
-
-
-lazy val tests = project.
- in(file("tests")).
- dependsOn(macros, core)
-
+lazy val macros = project.in(file("macros")).
+ settings(noPublishSettings).
+ settings(
+ name := "datomisca-macros",
+ addCompilerPlugin(paradise),
+ libraryDependencies ++= Seq(
+ datomic,
+ reflect(scalaVersion.value)
+ )
+ )
-lazy val integrationTests = project.
- in(file("integration")).
- dependsOn(macros, core).
- configs(IntegrationTest)
+lazy val docs = project.in(file("docs")).
+ settings(
+ name := "Datomisca Docs",
+ moduleName := "datomisca-docs"
+ ).
+ settings(docSettings).
+ settings(noPublishSettings).
+ settings(addCompilerPlugin(paradise)).
+ dependsOn(core, macros).
+ enablePlugins(MicrositesPlugin)
+
+val baseSettings = Seq(
+ scalacOptions ++= compilerOptions
+)
+
+val docSettings = baseSettings ++ Seq(
+ micrositeName := "Datomisca",
+ micrositeDescription := "Scala API for Datomic",
+ micrositeAuthor := "Daniel James",
+ micrositeHighlightTheme := "atom-one-light",
+    micrositeHomepage := "https://flyingwalrusllc.github.io/datomisca",
+ micrositeBaseUrl := "datomisca",
+ micrositeDocumentationUrl := "api",
+ micrositeGithubOwner := "flyingwalrusllc",
+ micrositeGithubRepo := "datomisca",
+ micrositePalette := Map(
+ "brand-primary" -> "#5B5988",
+ "brand-secondary" -> "#292E53",
+ "brand-tertiary" -> "#222749",
+ "gray-dark" -> "#49494B",
+ "gray" -> "#7B7B7E",
+ "gray-light" -> "#E5E5E6",
+ "gray-lighter" -> "#F4F3F4",
+ "white-color" -> "#FFFFFF"
+ ),
+ // addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), micrositeDocumentationUrl),
+ ghpagesNoJekyll := false,
+ scalacOptions in (ScalaUnidoc, unidoc) ++= Seq(
+ "-groups",
+ "-implicits",
+ "-doc-source-url", scmInfo.value.get.browseUrl + "/tree/master€{FILE_PATH}.scala",
+ "-sourcepath", baseDirectory.in(LocalRootProject).value.getAbsolutePath,
+ "-doc-root-content", (resourceDirectory.in(Compile).value / "rootdoc.txt").getAbsolutePath
+ ),
+ git.remoteRepo := "git@github.com:flyingwalrusllc/datomisca.git",
+ includeFilter in makeSite := "*.html" | "*.css" | "*.png" | "*.jpg" | "*.gif" | "*.svg" | "*.js" | "*.swf" | "*.yml" | "*.md"
+)
+
+mappings in (Compile, packageBin) ++= (mappings in (macros, Compile, packageBin)).value
+mappings in (Compile, packageSrc) ++= (mappings in (macros, Compile, packageSrc)).value
+
+mappings in (Compile, packageBin) ++= (mappings in (core, Compile, packageBin)).value
+mappings in (Compile, packageSrc) ++= (mappings in (core, Compile, packageSrc)).value
+
+val noPublishSettings = Seq(
+ publish := (),
+ publishLocal := (),
+ publishArtifact := false
+)
+
+def datomic = "com.datomic" % "datomic-free" % "0.9.5561" % Provided
+def specs2 = "org.specs2" %% "specs2-core" % "3.8.8" % Test
+def scalatest = "org.scalatest" %% "scalatest" % "3.0.1" % "it"
+def xmlModule = "org.scala-lang.modules" %% "scala-xml" % "1.0.6"
+def paradise = "org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full
+def reflect(vers: String) = "org.scala-lang" % "scala-reflect" % vers
diff --git a/core/build.sbt b/core/build.sbt
deleted file mode 100644
index 3aed33eb..00000000
--- a/core/build.sbt
+++ /dev/null
@@ -1,17 +0,0 @@
-
-name := "datomisca-core"
-
-libraryDependencies += Dependencies.Compile.datomic
-
-mappings in (Compile, packageSrc) <++=
- (sourceManaged in Compile, managedSources in Compile) map { (base, srcs) =>
- srcs pair (Path.relativeTo(base) | Path.flat)
- }
-
-(sourceGenerators in Compile) <+= (sourceManaged in Compile) map Boilerplate.genCore
-
-publish := ()
-
-publishLocal := ()
-
-publishArtifact := false
diff --git a/core/src/main/scala/datomisca/Attribute.scala b/core/src/main/scala/datomisca/Attribute.scala
index ab0f1c20..49d20e8f 100644
--- a/core/src/main/scala/datomisca/Attribute.scala
+++ b/core/src/main/scala/datomisca/Attribute.scala
@@ -16,9 +16,6 @@
package datomisca
-import scala.language.reflectiveCalls
-
-
/** The representation of Datomic attributes
*
* @constructor construct an attribute out of an ident, valueType,
diff --git a/core/src/main/scala/datomisca/DId.scala b/core/src/main/scala/datomisca/DId.scala
index 0280781f..37cb77ff 100644
--- a/core/src/main/scala/datomisca/DId.scala
+++ b/core/src/main/scala/datomisca/DId.scala
@@ -16,8 +16,6 @@
package datomisca
-import scala.language.existentials
-
import datomic.Util
sealed trait DId extends Any {
diff --git a/core/src/main/scala/datomisca/Datomic.scala b/core/src/main/scala/datomisca/Datomic.scala
index 5bd4c9e0..485e36d5 100644
--- a/core/src/main/scala/datomisca/Datomic.scala
+++ b/core/src/main/scala/datomisca/Datomic.scala
@@ -17,10 +17,7 @@
package datomisca
import scala.collection.JavaConverters._
-import scala.util.{Try, Success, Failure}
-
-import clojure.{lang => clj}
-
+import scala.util.Try
/** Main object containing:
* - all Datomic basic functions (Peer, Transactor)
diff --git a/core/src/main/scala/datomisca/DatomicMapping.scala b/core/src/main/scala/datomisca/DatomicMapping.scala
index e2d09380..d8fb4bcb 100644
--- a/core/src/main/scala/datomisca/DatomicMapping.scala
+++ b/core/src/main/scala/datomisca/DatomicMapping.scala
@@ -18,8 +18,6 @@ package datomisca
import functional.CombinatorImplicits
-import scala.language.implicitConversions
-
object DatomicMapping
extends CombinatorImplicits
{
diff --git a/core/src/main/scala/datomisca/Fact.scala b/core/src/main/scala/datomisca/Fact.scala
index c5a7b095..3bd9ddde 100644
--- a/core/src/main/scala/datomisca/Fact.scala
+++ b/core/src/main/scala/datomisca/Fact.scala
@@ -16,9 +16,6 @@
package datomisca
-import scala.language.reflectiveCalls
-
-
object Fact {
/** Creates a single assertion about the given entity id `id`.
*
diff --git a/core/src/main/scala/datomisca/Log.scala b/core/src/main/scala/datomisca/Log.scala
index 4e9e7e30..f2f5aee4 100644
--- a/core/src/main/scala/datomisca/Log.scala
+++ b/core/src/main/scala/datomisca/Log.scala
@@ -17,8 +17,6 @@
package datomisca
-import java.util.{Date => JDate}
-
/** Datomic's database log is a recording of all transaction data in historic
* order, organized for efficient access by transaction.
*
diff --git a/core/src/main/scala/datomisca/excision.scala b/core/src/main/scala/datomisca/excision.scala
index afb08489..48002718 100644
--- a/core/src/main/scala/datomisca/excision.scala
+++ b/core/src/main/scala/datomisca/excision.scala
@@ -16,7 +16,6 @@
package datomisca
-import scala.language.reflectiveCalls
import scala.collection.JavaConverters._
import java.util.Date
diff --git a/core/src/main/scala/datomisca/executioncontext/ExecutionContext.scala b/core/src/main/scala/datomisca/executioncontext/ExecutionContext.scala
index 0dc6c073..6787f256 100755
--- a/core/src/main/scala/datomisca/executioncontext/ExecutionContext.scala
+++ b/core/src/main/scala/datomisca/executioncontext/ExecutionContext.scala
@@ -16,11 +16,9 @@
package datomisca.executioncontext
-import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor }
-import java.util.Collection
+import java.util.concurrent.{ LinkedBlockingQueue, Executor, ExecutorService, ThreadFactory, TimeUnit, ThreadPoolExecutor }
import scala.concurrent.forkjoin._
-import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
-import scala.concurrent.duration.Duration
+import scala.concurrent.{ BlockContext, CanAwait, ExecutionContextExecutor }
import scala.util.control.NonFatal
diff --git a/core/src/main/scala/datomisca/schemaManagement.scala b/core/src/main/scala/datomisca/schemaManagement.scala
index 3d07cdf5..ea54aa60 100644
--- a/core/src/main/scala/datomisca/schemaManagement.scala
+++ b/core/src/main/scala/datomisca/schemaManagement.scala
@@ -16,8 +16,6 @@
package datomisca
-import scala.language.reflectiveCalls
-
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.Try
diff --git a/core/src/main/scala/datomisca/toAndFromDatomic.scala b/core/src/main/scala/datomisca/toAndFromDatomic.scala
index dda5b20d..39977b36 100644
--- a/core/src/main/scala/datomisca/toAndFromDatomic.scala
+++ b/core/src/main/scala/datomisca/toAndFromDatomic.scala
@@ -114,9 +114,6 @@ import java.{util => ju}
import java.util.{Date, UUID}
import java.net.URI
-import clojure.{lang => clj}
-
-
/**
* Think of FromDatomicInj[DD, T] as a type-level function: DD => T
* The implicits here construct a multi-parameter type class,
diff --git a/core/src/main/scala/datomisca/txdata.scala b/core/src/main/scala/datomisca/txdata.scala
index 06b0c6e5..f0b0b54c 100644
--- a/core/src/main/scala/datomisca/txdata.scala
+++ b/core/src/main/scala/datomisca/txdata.scala
@@ -16,9 +16,6 @@
package datomisca
-import scala.language.reflectiveCalls
-
-
trait TxData {
def toTxData: AnyRef
}
diff --git a/docs/src/main/resources/microsite/css/override.css b/docs/src/main/resources/microsite/css/override.css
new file mode 100644
index 00000000..e91a2eec
--- /dev/null
+++ b/docs/src/main/resources/microsite/css/override.css
@@ -0,0 +1,16 @@
+.technologies {
+ display: none;
+}
+
+.jumbotron {
+ background-image: none;
+}
+
+.sidebar-nav > .sidebar-brand a .brand-wrapper {
+ background-size: 36px 36px !important;
+}
+
+#site-header .navbar-wrapper .brand .icon-wrapper {
+ width: 36px;
+ background-size: 100%;
+}
diff --git a/docs/src/main/resources/microsite/data/menu.yml b/docs/src/main/resources/microsite/data/menu.yml
new file mode 100644
index 00000000..7ce62675
--- /dev/null
+++ b/docs/src/main/resources/microsite/data/menu.yml
@@ -0,0 +1,19 @@
+options:
+
+ - title: Typeclasses
+ url: typeclasses.html
+
+ - title: Features
+ url: features.html
+
+ - title: Building transaction data
+ url: txdata.html
+
+ - title: Getting Started
+ url: gettingstarted.html
+
+ - title: Philosophy
+ url: philosophy.html
+
+ - title: About Us
+ url: about-us.html
diff --git a/docs/src/main/resources/microsite/includes/references.md b/docs/src/main/resources/microsite/includes/references.md
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/docs/src/main/resources/microsite/includes/references.md
@@ -0,0 +1 @@
+
diff --git a/docs/src/main/resources/rootdoc.txt b/docs/src/main/resources/rootdoc.txt
new file mode 100644
index 00000000..dd93b0d2
--- /dev/null
+++ b/docs/src/main/resources/rootdoc.txt
@@ -0,0 +1,3 @@
+This is the API documentation for
+[[https://github.com/flyingwalrusllc/datomisca datomisca]], an
+idiomatic Scala-based Datomic access library.
diff --git a/docs/src/main/tut/about-us.md b/docs/src/main/tut/about-us.md
new file mode 100644
index 00000000..7e163489
--- /dev/null
+++ b/docs/src/main/tut/about-us.md
@@ -0,0 +1,10 @@
+---
+layout: docs
+title: About Us
+---
+
+# About Us
+
+You can contact us on [Datomisca Google Group](https://groups.google.com/forum/?fromgroups#!forum/datomisca).
+
+The project is maintained by Daniel James ([@dwhjames](https://twitter.com/dwhjames)).
diff --git a/docs/src/main/tut/features.md b/docs/src/main/tut/features.md
new file mode 100644
index 00000000..99a9dc8d
--- /dev/null
+++ b/docs/src/main/tut/features.md
@@ -0,0 +1,315 @@
+---
+layout: docs
+title: Features
+---
+
+# Raw API Features
+
+## Reactive transactions (Asynchronous & Non-Blocking with potential execution isolation)
+Using Scala 2.10 Execution Contexts & Futures, Datomic transactions are executed by Datomisca in an asynchronous & non-blocking way managed by the provided execution context. In this way, you can control in which pool of threads you want to execute your transactor requests (communicating with remote Datomic transactor).
+
+{% highlight scala %}
+val person = Namespace("person")
+
+Datomic.transact(
+ operation1,
+ operation2,
+ operation3,
+ ...
+) map { tx =>
+ ...
+}
+{% endhighlight %}
+
+
+## Conversion between Datomic/Clojure and Scala types
+
+When Datomic entities are created or accessed, Datomic types (ie Clojure types) are retrieved. From Java API, all those types are seen as `Object` which is not really useful. So you could end into using `.asInstanceOf[T]` everywhere. Hopefully, Datomisca provides some conversion from/to Datomic types.
+
+
+{% highlight scala %}
+val s: DString = Datomic.toDatomic("toto")
+val l: DLong = Datomic.toDatomic(5L)
+
+val l: String = Datomic.fromDatomic(DString("toto"))
+val s: Long = Datomic.fromDatomic(DLong(5L))
+
+val entity = database.entity(entityId)
+val name = entity.as[String](person / "name")
+val age = entity.as[Long](person / "age")
+{% endhighlight %}
+
+
+## Compile-Time query validation & input/output parameters inference
+
+Based on Scala 2.10 Macros, Datomisca provides :
+
+- **Compile-time validation of Datomic query strings** and syntax error detection.
+- **Compile-time inference of input/output parameters** (number for now): when you execute the query, you must pass the right number of input parameters and manage the right number of output parameters.
+
+{% highlight scala %}
+// Valid query
+// produces a Query with :
+// - 2 input arguments (db and ?char)
+// - 2 output arguments (?e ?n)
+scala> Query("""
+ | [ :find ?e ?n
+ | :in $ ?char
+ | :where [ ?e :person/name ?n ]
+ | [ ?e person/character ?char ]
+ | ]
+ | """)
+res0: TypedQueryAuto2[DatomicData,DatomicData,(DatomicData, DatomicData)] = [ :find ?e ?n :in $ ?char :where [?e :person/name ?n] [?e :person/character ?char] ]
+
+
+// Invalid query with missing ":"
+// error at compile-time
+scala> Query("""
+ | [ :find ?e ?n
+ | :in $ ?char
+ | :where [ ?e :person/name ?n ]
+ | [ ?e person/character ?char ]
+ | ]
+ | """)
+:15: error: `]' expected but `p' found
+ [ ?e person/character ?char ]
+ ^
+{% endhighlight %}
+
+Datomisca is also able to manage:
+
+- datalog rules alias
+- query functions
+- cherry on cake: you can use Scala values in query using `${myval}` as in String Interpolation
+
+_In the future, based on type-safe Schema presented below, we will also be able to infer parameter types._
+
+
+## Queries as static reusable structures
+
+This is a very important idea in Datomic: **a query is a static structure** which can be built once and reused as many times as you want.
+
+{% highlight scala %}
+val query = Query("""
+ [ :find ?e ?n
+ :in $ ?char
+ :where [ ?e :person/name ?n ]
+ [ ?e :person/character ?char ]
+ ]
+""")
+
+Datomic.q( query, database, DRef(person.character/violent) ) map {
+ case (e: DLong, n: DString) => // do something
+}
+
+Datomic.q( query, database, DRef(person.character/clever) ) map {
+ case (e: DLong, n: DString) => // do something
+}
+{% endhighlight %}
+
+
+## Build transaction data programmatically
+
+You can build your operations `add` / `retract` / `addEntity` / `retractEntity` operations in a programmatic way.
+
+{% highlight scala %}
+val person = Namespace("person")
+
+Datomic.transact(
+ // Atomic Fact ops
+ Fact.add(DId(Partition.USER))(person / "name" -> "tata"),
+ Fact.retract(DId(Partition.USER))(person / "name" -> "titi"),
+ Fact.partition(Partition(Namespace.DB.PART / "mypart")),
+
+ // Entity ops
+ Entity.add(DId(Partition.USER))(
+ person / "name" -> "toto",
+ person / "age" -> 30L
+ ),
+ Entity.retract(entityId)
+) map { tx =>
+ ...
+}
+{% endhighlight %}
+
+
+## Build schemas programmatically
+
+Schema is one of the remarkable specific features of Datomic: schema attributes constrain the type and cardinality of the fields of Datomic entities.
+
+Schema attributes are just facts stored in Datomic in a special partition defining the parameters of an attribute:
+
+- name
+- type
+- cardinality (one/many)
+- doc
+- unicity
+- fulltext
+- ...
+
+In Datomisca, we have provided some helpers to create those attributes in a programmatic way. A Datomic schema is just a sequence of fact operations.
+
+Moreover, Datomisca attributes are statically typed and, as you can imagine, the attribute type can be used for the extended conversion features presented below.
+
+{% highlight scala %}
+val uri = "datomic:mem://datomicschemaqueryspec"
+
+val person = new Namespace("person") {
+ val character = Namespace("person.character")
+}
+
+val violent = AddIdent(person.character / "violent")
+val weak = AddIdent(person.character / "weak")
+val clever = AddIdent(person.character / "clever")
+val dumb = AddIdent(person.character / "dumb")
+
+val name = Attribute(
+ person / "name",
+ SchemaType.string,
+ Cardinality.one).withDoc("Person's name")
+
+val age = Attribute(
+ person / "age",
+ SchemaType.long,
+ Cardinality.one).withDoc("Person's age")
+
+val characters = Attribute(
+ person / "character",
+ SchemaType.ref,
+ Cardinality.many).withDoc("Person's characters")
+
+val schema = Seq(
+ // attributes
+ name, age, characters,
+ // enumerated values
+ violent, weak, clever, dumb
+)
+
+Datomic.transact(schema) map { tx =>
+ ...
+}
+{% endhighlight %}
+
+
+## Parse Datomic DTM files at runtime
+
+If you wrote your schema in a DTM file for example, you can load and parse it at runtime.
+
+Naturally doing this, you lose the power of compile-time validation.
+
+{% highlight scala %}
+// example with Datomic seattle sample schema
+val schemaIs = current.resourceAsStream("seattle-schema.dtm").get
+val schemaContent = Source.fromInputStream(schemaIs).mkString
+val schema = Datomic.parseOps(schemaContent)
+
+Datomic.transact(schema) map { tx =>
+ ...
+}
+{% endhighlight %}
+
+
+# Extended Features
+
+## Type-safe Datomic operations using Schema
+
+Based on previously described static-typed schema, you can build your operations `add` / `retract` / `addEntity` / `retractEntity` operations in a type-safe way.
+
+{% highlight scala %}
+val person = Namespace("person")
+
+object PersonSchema {
+ val name = Attribute(
+ person / "name",
+ SchemaType.string,
+ Cardinality.one).withDoc("Person's name")
+ val age = Attribute(
+ person / "age",
+ SchemaType.long,
+ Cardinality.one).withDoc("Person's name")
+ val birth = Attribute(
+ person / "birth",
+ SchemaType.instant,
+ Cardinality.one).withDoc("Person's birth date")
+}
+
+// OK
+SchemaFact.add(DId(Partition.USER))( PersonSchema.name -> "toto" )
+// ERROR at compile-time since attribute "name" is a string
+SchemaFact.add(DId(Partition.USER))( PersonSchema.name -> 123L )
+
+// OK
+val e = (SchemaEntity.newBuilder
+ += (PersonSchema.name -> "toto")
+ += (PersonSchema.age -> 45L)
+ += (PersonSchema.birth -> birthDate)
+) withId DId(Partition.USER)
+
+// ERROR at compile-time (field "name" should be a string)
+val e = (SchemaEntity.newBuilder
+ += (PersonSchema.name -> 123)
+ += (PersonSchema.age -> 45L)
+ += (PersonSchema.birth -> birthDate)
+) withId DId(Partition.USER)
+{% endhighlight %}
+
+
+## Type-safe mapping from/to Scala structure to/from Datomic entities
+
+Based on Scala typeclass conversions and pure functional combinators, we provide this tool to build mappers to convert datomic entities from/to Scala structures such as case classes, tuples or collections.
+These conversions are naturally based on previously described schema typed attributes.
+
+
+{% highlight scala %}
+import Datomic._
+import DatomicMapping._
+
+case class Person(
+  name: String, age: Long, birth: java.util.Date
+)
+
+object PersonSchema {
+ val name = Attribute(
+ person / "name",
+ SchemaType.string,
+ Cardinality.one).withDoc("Person's name")
+ val age = Attribute(
+ person / "age",
+ SchemaType.long,
+ Cardinality.one).withDoc("Person's name")
+ val birth = Attribute(
+ person / "birth",
+ SchemaType.instant,
+ Cardinality.one).withDoc("Person's birth date")
+ ...
+}
+
+implicit val personReader = (
+ PersonSchema.name .read[String] and
+ PersonSchema.age .read[Long] and
+ PersonSchema.birth.read[java.util.Date]
+)(Person)
+
+implicit val personWriter = (
+ PersonSchema.name .write[String] and
+ PersonSchema.age .write[Long] and
+ PersonSchema.birth.write[java.util.Date]
+)(unlift(Person.unapply))
+
+DatomicMapping.toEntity(DId(Partition.USER))(
+ Person("toto", 30L, birthDate)
+)
+
+val entity = database.entity(realEntityId)
+val p = DatomicMapping.fromEntity[Person](entity)
+
+val name = entity(PersonSchema.name)
+val age = entity(PersonSchema.age)
+val birth = entity(PersonSchema.birth)
+
+assert(
+ (p.name == name) &&
+ (p.age == age) &&
+ (p.birth == birth)
+)
+{% endhighlight %}
diff --git a/docs/src/main/tut/gettingstarted.md b/docs/src/main/tut/gettingstarted.md
new file mode 100644
index 00000000..1190b107
--- /dev/null
+++ b/docs/src/main/tut/gettingstarted.md
@@ -0,0 +1,381 @@
+---
+layout: docs
+title: Getting Started
+---
+
+# Getting Started
+
+Here is a very simple sample to get started with Datomisca.
+
+## Requirements
+
+- SBT 0.13.x
+- Scala 2.11.x or 2.12.x
+
+## Github project
+
+You can find this sample in Datomic Github Samples [Getting-Started](https://github.com/dwhjames/datomisca/tree/master/samples/getting-started)
+
+
+## #1 Add resolvers to SBT
+
+You can add that in your `build.sbt` or `Build.scala` depending on your choice.
+
+{% highlight scala %}
+resolvers += Resolver.bintrayRepo("dwhjames", "maven")
+// to get Datomic free (for pro, you must put in your own repo or local)
+resolvers += "clojars" at "https://clojars.org/repo"
+{% endhighlight %}
+
+## #2 Add dependencies
+
+> The latest release is {{ site.latestrelease }}
+
+{% highlight scala %}
+libraryDependencies ++= Seq(
+ "com.github.dwhjames" %% "datomisca" % "{{ site.latestrelease }}",
+ "com.datomic" % "datomic-free" % "0.9.4724"
+)
+{% endhighlight %}
+
+## #3 Add imports
+
+The following imports should be sufficient to get you started.
+
+{% highlight scala %}
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import datomisca._
+{% endhighlight %}
+
+
+## #4 Create a connection
+
+To use Datomisca, you need an implicit connection to Datomic in your scope.
+
+{% highlight scala %}
+// Datomic URI definition
+val uri = "datomic:mem://datomisca-getting-started"
+
+// Datomic Connection as an implicit in scope
+implicit val conn = Datomic.connect(uri)
+{% endhighlight %}
+
+> Datomic’s public API is threadsafe, and there is no need to pool the Datomic connection. Datomic will return the same instance of Connection for a given URI, no matter how many times you ask. And Datomic will cache that single instance even if you don't. ([Stuart Halloway 2013-06-25](https://groups.google.com/d/msg/datomic/ekwfTZbMCaE/GL4J0AyonI8J))
+
+
+## #5 Create a database
+
+We start from scratch so let's first create a DB.
+
+{% highlight scala %}
+Datomic.createDatabase(uri)
+{% endhighlight %}
+
+> This method returns a boolean. If true, then a fresh database was created, or else a database already existed for the given URI.
+
+
+## #6 Create a schema
+
+Datomisca allows to define your Schema in a programmatic way.
+
+Here you create your:
+
+- namespaces,
+- attributes
+- enumerated entities
+
+Attributes and enumerated entities are gathered in a schema representing your entity.
+
+Let's create four attributes to represent a `Person`:
+
+- `name: SchemaType.string, Cardinality.one`
+- `home: SchemaType.string, Cardinality.one`
+- `birth: SchemaType.instant, Cardinality.one`
+- `hobbies: SchemaType.ref, Cardinality.many`
+
+{% highlight scala %}
+object PersonSchema {
+ // Namespaces definition to be reused in Schema
+ object ns {
+ val person = new Namespace("person") {
+ val hobby = Namespace("person.hobby")
+ }
+ }
+
+ // Attributes
+ val name = Attribute(
+ ns.person / "name",
+ SchemaType.string,
+ Cardinality.one).withDoc("Person's name")
+ val home = Attribute(
+ ns.person / "home",
+ SchemaType.string,
+ Cardinality.one).withDoc("Person's hometown")
+ val birth = Attribute(
+ ns.person / "birth",
+ SchemaType.instant,
+ Cardinality.one).withDoc("Person's birth date")
+ val hobbies = Attribute(
+ ns.person / "hobbies",
+ SchemaType.ref,
+ Cardinality.many).withDoc("Person's hobbies")
+
+ // hobby enumerated values
+ val movies = AddIdent(ns.person.hobby / "movies")
+ val music = AddIdent(ns.person.hobby / "music")
+ val reading = AddIdent(ns.person.hobby / "reading")
+ val sports = AddIdent(ns.person.hobby / "sports")
+ val travel = AddIdent(ns.person.hobby / "travel")
+
+ // Schema
+ val txData: Seq[TxData] = Seq(
+    name, home, birth, hobbies,            // attributes
+ movies, music, reading, sports, travel // ident entities
+ )
+
+}
+
+{% endhighlight %}
+
+ - `Namespace` is just a helper making our code clearer when creating keywords.
+ - `Attribute` and `AddIdent` are helpers for creating Datomic schema data.
+ - the `PersonSchema` and `ns` objects are our idiom for gathering schema information, but feel free to organize your schemas as you see fit.
+
+
+## #7 Transact your schema
+
+Now we have a schema, let's transact it into our database. This is our first
+operation using the transactor and as you may know, Datomisca manages
+transactor's communication in an asynchronous and non-blocking way based on
+Scala 2.10 Execution Context.
+
+To ask the transaction to perform some operations, we use the following method:
+
+{% highlight scala %}
+Datomic.transact(txData: TraversableOnce[TxData])(implicit conn: Connection, ec: ExecutionContext): Future[TxReport]
+{% endhighlight %}
+
+As you can see, it accepts a collection of transaction data and returns a `Future[TxReport]`.
+
+> If you are unfamiliar with Scala Futures, then consult [this overview](http://docs.scala-lang.org/overviews/core/futures.html).
+
+So let's transact our schema into Datomic:
+
+{% highlight scala %}
+Datomic.transact(PersonSchema.txData) flatMap { tx =>
+ ...
+ // do something
+ ...
+}
+{% endhighlight %}
+
+> We use `flatMap` because we expect to perform other asynchronous operations upon the completion of the transaction.
+
+
+## #8 Define your first entity
+
+The following code will construct the transaction data for a person called
+John, whose hometown is Brooklyn, was born on Jan, 1 1980, and likes
+travelling and watching movies.
+
+{% highlight scala %}
+// John temporary ID
+val johnId = DId(Partition.USER)
+// John person entity
+val john: TxData = (
+ SchemaEntity.newBuilder
+ += (PersonSchema.name -> "John")
+ += (PersonSchema.home -> "Brooklyn, NY")
+ += (PersonSchema.birth -> new java.util.Date(80, 0, 1))
+ ++= (PersonSchema.hobbies -> Set(PersonSchema.movies, PersonSchema.travel))
+ ) withId johnId
+{% endhighlight %}
+
+The transaction data `john` is equivalent to the following Clojure map.
+
+{% highlight clojure %}
+(let [johnId (d/tempid :db.part/user)]
+ {:db/id (d/tempid :db.part/user)
+ :person/name "John"
+ :person/home "Brooklyn, NY"
+ :person/birth (java.util.Date 80 0 1)
+ :person/hobbies [:person.hobby/movies :person.hobby/travel]})
+{% endhighlight %}
+
+In Datomisca, the `DId` type is one of the ways of constructing entity
+ids, and here we are constructing a temporary entity id in the user partition.
+
+The `SchemaEntity` builder follows Scala’s `Builder` for collections. This is
+an idiom for incrementally building collections. To build up transaction data
+for a new entity, we use attribute–value pairs, rather than keyword–value
+pairs. This provides a level of type-safety, as the attribute stores the
+schema type and the cardinality along with the keyword ident. The value of the
+pair is statically checked against the attribute’s type and cardinality.
+
+
+## #9 Transact your entity
+
+Transacting regular data and schema data is no different.
+
+{% highlight scala %}
+// creates an entity
+Datomic.transact(john) map { tx =>
+ val realJohnId = tx.resolve(johnId)
+ ...
+ // Do something else
+}
+{% endhighlight %}
+
+- `tx.resolve(johnId)` is used to retrieve the real Id after insertion from temporary Id.
+- you can also retrieve several IDs at the same time:
+
+{% highlight scala %}
+val Seq(realId1, realId2, realId3) = tx.resolve(id1, id2, id3)
+{% endhighlight %}
+
+
+## #10 Write a query
+
+So now that we have an entity in our DB, let's try to query for it.
+
+In Datomisca, you **write your queries in Datalog exactly in the same way as
+Clojure**. Leveraging Scala’s 2.10 macros, Datomisca **validates the syntax of
+your query at compile-time and also deduces the number of input/output
+parameters** (more features are also in the roadmap).
+
+Let's write a "find person by name" query:
+
+{% highlight scala %}
+val queryFindByName = Query("""
+ [:find ?e ?home
+ :in $ ?name
+ :where
+ [?e :person/name ?name]
+ [?e :person/home ?home]]
+""")
+{% endhighlight %}
+
+- This creates a query that accepts two input parameters (`$` and `?name`) and returns two output parameters (`?e` and `?home`)
+- the query is a static structure and you can declare it once and reuse it as much as you want.
+- `$` identifies the database as an input data source
+- `?name` is our "by name" input parameter
+
+Datomisca’s query macro also supports
+[string interpolation](http://docs.scala-lang.org/overviews/core/string-interpolation.html),
+which means that the query can be written as follows.
+
+{% highlight scala %}
+val queryFindByName = Query(s"""
+ [:find ?e ?home
+ :in $$ ?name
+ :where
+ [?e ${PersonSchema.name} ?name]
+ [?e ${PersonSchema.home} ?home]]
+""")
+{% endhighlight %}
+
+Remember to watch out for escaping the datasource `$` as `$$`. The `toString`
+method is called on the values of expressions that are interpolated. The
+string representation of attributes is their keyword, which is why we can
+rewrite the query this way. The query treats expressions of type `String`
+specially, by double quoting them, so,
+
+{% highlight scala %}
+val name = "John"
+val queryFindByName = Query(s"""
+ [:find ?e ?home
+ :in $$
+ :where
+ [?e ${PersonSchema.name} $name]
+ [?e ${PersonSchema.home} ?home]]
+""")
+{% endhighlight %}
+
+will result in a query with a clause
+
+{% highlight clojure %}
+[?e :person/name "John"]
+{% endhighlight %}
+
+
+## #11 Execute a query
+
+Queries are executed using the `Datomic.q` method, with your query and the appropriate input parameters.
+
+{% highlight scala %}
+val results = Datomic.q(queryFindByName, conn.database, "John")
+{% endhighlight %}
+
+- `Datomic.q` expects a query and the right number of input parameters according to your query (here two)
+- `conn.database` is the ‘current’ database value available from the connection `conn`.
+
+
+## #12 Use the query result
+
+The query results are bound to the name `results`.
+According to the input query, the compiler has inferred that there should be two output parameters.
+
+Thus, `results` is an `Iterable[(Any, Any)]`.
+
+{% highlight scala %}
+results.headOption map {
+ case (eid: Long, home: String) =>
+ ...
+ // do something
+}
+{% endhighlight %}
+
+Note that `results` is an `Iterable[(Any, Any)]` and not an `Iterable[(Long, String)]`
+as you might hope. Why? Because with the info provided in the query,
+it's impossible to infer those types directly. In the future, we hope to
+extend the power of the query macro to provide type-safety for output
+parameters using schema information. Therefore, for now, you must type match with
+a `case`.
+
+
+## #13 Traverse the entity graph
+
+With the previous query, we retrieved `eid`, which is an entity id, and now we
+can get the entity from the database and inspect it.
+
+{% highlight scala %}
+val entity: Entity = conn.database.entity(eid)
+{% endhighlight %}
+
+As before, `conn.database` retrieves the currently available value of the
+database, and the `entity` method looks up the entity map for a given
+identifier.
+
+The [Entity]({{ site.baseurl }}/api/0.7.x/index.html#datomisca.Entity)
+and
+[RichEntity]({{ site.baseurl }}/api/0.7.x/index.html#datomisca.package$$RichEntity)
+APIs provide various ways of interacting with entities. The `apply` method
+on the implicit `RichEntity` allows us to use attributes rather than
+keywords to retrieve values, in a similar fashion to how we constructed
+transaction data above.
+
+{% highlight scala %}
+val johnName: String = entity(PersonSchema.name)
+val johnHome: String = entity(PersonSchema.home)
+val johnBirth: java.util.Date = entity(PersonSchema.birth)
+{% endhighlight %}
+
+The attributes possess the type information, so Datomisca computes the correct
+return type.
+
+Datomisca is able to do this for all primitives, of cardinality one or many, but
+it can’t do this for reference attributes as Datomic will return values of type
+`Entity` in most cases, but `Keyword` if the referenced entity has an ident
+attribute, which is the case here:
+
+{% highlight scala %}
+val johnHobbies = entity.read[Set[Keyword]](PersonSchema.hobbies)
+{% endhighlight %}
+
+The `read` method allows us to do a type-safe cast.
+
+
+## And much more…
+
+Read the more detailed guides and the [API docs]({{ site.baseurl }}/api/0.7.x/index.html) for more details about what was covered here.
diff --git a/docs/src/main/tut/index.md b/docs/src/main/tut/index.md
new file mode 100644
index 00000000..cfcb3ecc
--- /dev/null
+++ b/docs/src/main/tut/index.md
@@ -0,0 +1,8 @@
+---
+layout: home
+title: "Home"
+section: "home"
+---
+
+# Datomisca, Idiomatic Scala access to Datomic
+
diff --git a/docs/src/main/tut/philosophy.md b/docs/src/main/tut/philosophy.md
new file mode 100644
index 00000000..a67be0e7
--- /dev/null
+++ b/docs/src/main/tut/philosophy.md
@@ -0,0 +1,45 @@
+---
+layout: docs
+title: Philosophy
+---
+
+# Philosophy
+
+## Datomic principles, without compromise
+Datomisca is a thin layer around Datomic aimed at exposing Datomic’s functionality and leveraging its full power.
+
+The key Datomic features we really love are:
+
+- A database as an immutable value
+- An explicit notion of time
+- Atomically creating and updating facts
+- A navigable, lazy entity structure
+- A Datalog query language with rules
+- Queries as reusable static structures
+- Schemas to constrain datatypes
+- Bidirectional references
+- Database temporal exploration
+- Database state simulation without commit
+
+In our design, we are also deeply aware of the architecture of Datomic:
+
+- Single, remote, asynchronous transactor
+- Multiple, distributed peers with local cache
+
+
+## Datomic features with a Scala flavor
+
+Datomisca uses Scala features to enhance the Datomic experience for Scala developers:
+
+- Type safety
+- Asynchronicity & non-blocking patterns
+- Advanced functional programming
+- Compile-time enhancement with Scala 2.10 macros
+
+Other than the small handful of Datomic specific types, Datomic’s Java API specifies parameter and return types almost all as `Object`s, `List`s, and `Map`s.
+For example, the result of a query has type `List[List[Object]]`, signifying a set of heterogeneously-typed tuples.
+In Datomisca, we provide the means to recover the dynamic types of these objects into static Scala types.
+
+An important feature of Datomic is that entities have no specific representation beyond a set of datoms with a common identity. The consequence is that it is really **easy to manipulate data in an atomic way**.
+
+In designing Datomisca, we wanted to preserve this property. That’s why **we don't focus on mapping entities to and from Scala case classes**. Nonetheless, we provide the means to construct mappings as an extension. We caution the keen case-class-mapper to see case classes as a logical view on a collection of datoms, rather than datoms corresponding to an object model.
diff --git a/docs/src/main/tut/txdata.md b/docs/src/main/tut/txdata.md
new file mode 100644
index 00000000..a0f3fa0a
--- /dev/null
+++ b/docs/src/main/tut/txdata.md
@@ -0,0 +1,233 @@
+---
+layout: docs
+title: Building transaction data
+---
+
+# Building basic transaction data
+
+
+## Build an assertion
+
+Given a temporary id and a keyword corresponding to the ident of an attribute,
+
+{% highlight scala %}
+val id = DId(Partition.USER)
+val attrKW = Datomic.KW(":attr")
+{% endhighlight %}
+
+we can construct an assertion as follows:
+
+{% highlight scala %}
+val txData: TxData = Fact.add(id)(attrKW -> "Datomisca")
+{% endhighlight %}
+
+This corresponds to:
+
+{% highlight clojure %}
+[:db/add id :attr "Datomisca"]
+{% endhighlight %}
+
+## Build a retraction
+
+Given an existing entity id and the `:attr` keyword from above, we can construct a retraction as follows:
+
+{% highlight scala %}
+val eId: Long = …
+val txData: TxData = Fact.retract(eId)(attrKW -> "Datomisca")
+{% endhighlight %}
+
+This corresponds to:
+
+{% highlight clojure %}
+[:db/retract eId :attr "Datomisca"]
+{% endhighlight %}
+
+
+## Build a partition assertion
+
+The `Partition` class is nothing more than a value class wrapper for a
+`Keyword`. Its purpose is simply to signal intent. We can construct a keyword
+intended as a partition ident and the transaction data to assert the partition
+as follows:
+
+{% highlight scala %}
+val partition = new Partition(Datomic.KW(":mypartition"))
+val txData: TxData = Fact.partition(partition)
+{% endhighlight %}
+
+This corresponds to:
+
+{% highlight clojure %}
+{:db/id #db/id [:db.part/db]
+ :db/ident :mypartition
+ :db.install/_partition :db.part/db}
+{% endhighlight %}
+
+## Build an entity assertion
+
+Given a temporary id and two keywords corresponding to the idents of two attributes,
+
+{% highlight scala %}
+val id = DId(Partition.USER)
+val attr1KW = Datomic.KW(":attr1")
+val attr2KW = Datomic.KW(":attr2")
+{% endhighlight %}
+
+we can construct an entity assertion as follows:
+
+{% highlight scala %}
+val txData: TxData =
+ Entity.add(id)(
+ attr1KW -> "Datomisca",
+ attr2KW -> "Datomic"
+ )
+{% endhighlight %}
+
+This corresponds to:
+
+{% highlight clojure %}
+{:db/id id
+ :attr1 "Datomisca"
+ :attr2 "Datomic"}
+{% endhighlight %}
+
+
+## Build an ident entity assertion
+
+The following is a shortcut for building basic ident entities, in the case of building small enumerations:
+
+{% highlight scala %}
+val txData: TxData = AddIdent(Datomic.KW(":myident"))
+{% endhighlight %}
+
+This corresponds to:
+
+{% highlight clojure %}
+{:db/id #db/id [:db.part/user]
+ :db/ident :myident}
+{% endhighlight %}
+
+Note that one can also supply a partition explicitly to `AddIdent` as a second argument.
+
+
+## Build an entity retraction
+
+Given an existing entity id, we can construct a retraction of the entire
+entity and all references to it as follows:
+
+{% highlight scala %}
+val eId: Long = …
+Entity.retract(eId)
+{% endhighlight %}
+
+This corresponds to:
+
+{% highlight clojure %}
+[:db.fn/retractEntity eId]
+{% endhighlight %}
+
+---
+
+# Building transaction data with schema support
+
+
+## Build a typed assertion
+
+Given a temporary id and an attribute of type string and cardinality one,
+
+{% highlight scala %}
+val id = DId(Partition.USER)
+val attr: Attribute[String, Cardinality.one.type] =
+ Attribute(
+ Datomic.KW(":attr"),
+ SchemaType.string,
+ Cardinality.one)
+{% endhighlight %}
+
+we can construct a typed assertion as follows:
+
+{% highlight scala %}
+val txData: TxData = SchemaFact.add(id)(attr -> "Datomisca")
+{% endhighlight %}
+
+A type ascription has been given to `attr` for clarity. Attributes are typed
+with their value type and their cardinality. This type information is used
+here to statically check that the type of the Scala value given is
+permissable. The attribute here has a value type of `String` and the value
+given is a string, so this expression will type check.
+
+Ultimately, the same transaction data is generated as for:
+
+{% highlight scala %}
+Fact.add(id)(attr.ident -> "Datomisca")
+{% endhighlight %}
+
+
+## Build a typed retraction
+
+Given an existing entity id and the attribute from above, we can construct a retraction as follows:
+
+{% highlight scala %}
+val eId: Long = …
+val txData: TxData = SchemaFact.retract(eId)(attr -> "Datomisca")
+{% endhighlight %}
+
+The types are handled in exactly the same way as for assertions.
+
+
+## Build a typed entity assertion
+
+Given a temporary id, an attribute of type string and cardinality one, and an
+attribute of type long and cardinality one,
+
+{% highlight scala %}
+val id = DId(Partition.USER)
+val attr1: Attribute[String, Cardinality.one.type] =
+ Attribute(
+ Datomic.KW(":attr1"),
+ SchemaType.string,
+ Cardinality.one)
+val attr2: Attribute[Long, Cardinality.one.type] =
+ Attribute(
+ Datomic.KW(":attr2"),
+ SchemaType.long,
+ Cardinality.one)
+{% endhighlight %}
+
+we can construct a typed entity assertion as follows:
+
+{% highlight scala %}
+val txData: TxData = (
+ SchemaEntity.newBuilder
+ += (attr1 -> "Datomisca")
+ += (attr2 -> 2L)
+) withId id
+{% endhighlight %}
+
+This construction is an adaptation of the collection builders from Scala’s
+collections library. A schema entity builder allows one to mutably build up
+transaction data for an entity and then seal it into an immutable result at
+the end.
+
+There are some additional methods on schema entity builders that are
+illustrated by the following snippet:
+
+{% highlight scala %}
+val attr3: Attribute[Long, Cardinality.many.type] =
+ Attribute(
+ Datomic.KW(":attr3"),
+ SchemaType.long,
+ Cardinality.many)
+
+val p: PartialAddEntity = (
+ SchemaEntity.newBuilder
+ +?= (attr1 -> Some("Datomic"))
+ ++= (attr3 -> Set(2012L, 2013L, 2014L))
+).partial()
+
+val txData = (
+ SchemaEntity.newBuilder
+ += (attr2 -> 3L)
+ ++= p
+) withId id
+{% endhighlight %}
diff --git a/docs/src/main/tut/typeclasses.md b/docs/src/main/tut/typeclasses.md
new file mode 100644
index 00000000..b1061b80
--- /dev/null
+++ b/docs/src/main/tut/typeclasses.md
@@ -0,0 +1,38 @@
+---
+layout: docs
+title: Typeclasses
+---
+
+# Type safety with type classes (Work in Progress)
+
+## Points in time
+
+The `AsPointT` type class is used when API methods require a point time. With
+the Datomic API, time is transaction time as recorded by the transactor.
+Points in time can be specified in absolute time as instances of
+`java.util.Date`, or as logical time as a basis T value (of type `Long`), or
+as an entity id for a transaction entity (of type `Long`).
+
+This type class is used by `Database.asOf`, `Database.since`,
+`Database.entidAt`, and `Log.txRange`. For example:
+
+{% highlight scala %}
+val db: Database = …
+val db1 = db.asOf(1001)
+val db2 = db.since(new java.util.Date)
+{% endhighlight %}
+
+
+## Permanent entity ids
+
+The `AsPermanentEntityId` type class is used when API methods require an
+identifier for an entity that should be already present in the database. In
+Datomic, entities are identified by their `:db/id` values of type `Long`,
+however, entities can also be identified by [lookup refs](http://docs.datomic.com/identity.html#lookup-refs).
+
+This type class is used by `Database.entity`, `Database.entid`,
+`Database.ident`, `Entity.retract`, `Fact.retract`, `SchemaFact.retract`, and
+various excision methods.
+
+
+## …
diff --git a/integration/build.sbt b/integration/build.sbt
deleted file mode 100644
index 576cffcb..00000000
--- a/integration/build.sbt
+++ /dev/null
@@ -1,23 +0,0 @@
-
-Defaults.itSettings
-
-name := "datomisca-tests"
-
-libraryDependencies += Dependencies.Compile.datomic
-
-libraryDependencies += Dependencies.IntegrationTest.scalaTest
-
-// add scala-xml dependency when needed (for Scala 2.11 and newer)
-// this mechanism supports cross-version publishing
-libraryDependencies := {
- CrossVersion.partialVersion(scalaVersion.value) match {
- case Some((2, scalaMajor)) if scalaMajor >= 11 =>
- libraryDependencies.value :+ "org.scala-lang.modules" %% "scala-xml" % "1.0.1"
- case _ =>
- libraryDependencies.value
- }
-}
-
-fork in IntegrationTest := true
-
-publishArtifact := false
diff --git a/macros/build.sbt b/macros/build.sbt
deleted file mode 100644
index a60afb44..00000000
--- a/macros/build.sbt
+++ /dev/null
@@ -1,14 +0,0 @@
-
-name := "datomisca-macros"
-
-MacroSettings.settings
-
-libraryDependencies += Dependencies.Compile.datomic
-
-unmanagedSourceDirectories in Compile += (sourceDirectory in Compile).value / s"scala_${scalaBinaryVersion.value}"
-
-publish := ()
-
-publishLocal := ()
-
-publishArtifact := false
diff --git a/macros/src/main/scala_2.11/datomisca/macros/Helper.scala b/macros/src/main/scala/datomisca/macros/Helper.scala
similarity index 99%
rename from macros/src/main/scala_2.11/datomisca/macros/Helper.scala
rename to macros/src/main/scala/datomisca/macros/Helper.scala
index a03bca76..30e9b47d 100644
--- a/macros/src/main/scala_2.11/datomisca/macros/Helper.scala
+++ b/macros/src/main/scala/datomisca/macros/Helper.scala
@@ -17,7 +17,6 @@
package datomisca
package macros
-import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context
import scala.collection.mutable
diff --git a/macros/src/main/scala_2.11/datomisca/macros/MacroImpl.scala b/macros/src/main/scala/datomisca/macros/MacroImpl.scala
similarity index 99%
rename from macros/src/main/scala_2.11/datomisca/macros/MacroImpl.scala
rename to macros/src/main/scala/datomisca/macros/MacroImpl.scala
index 6adc9304..3ec0a225 100644
--- a/macros/src/main/scala_2.11/datomisca/macros/MacroImpl.scala
+++ b/macros/src/main/scala/datomisca/macros/MacroImpl.scala
@@ -17,7 +17,6 @@
package datomisca
package macros
-import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context
import scala.collection.JavaConverters._
diff --git a/macros/src/main/scala_2.10/datomisca/macros/Helper.scala b/macros/src/main/scala_2.10/datomisca/macros/Helper.scala
deleted file mode 100644
index 631b2c82..00000000
--- a/macros/src/main/scala_2.10/datomisca/macros/Helper.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2012 Pellucid and Zenexity
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package datomisca
-package macros
-
-import scala.language.experimental.macros
-import scala.reflect.macros.Context
-
-import scala.collection.mutable
-import scala.collection.JavaConverters._
-
-import java.{lang => jl}
-import java.{math => jm}
-import clojure.{lang => clj}
-
-
-private[datomisca] class Helper[C <: Context](val c: C) {
- import c.universe._
-
- private def abortWithMessage(message: String) =
- c.abort(c.enclosingPosition, message)
-
- def literalEDN(edn: Any, stk: mutable.Stack[c.Tree] = mutable.Stack.empty[c.Tree]): c.Tree =
- edn match {
- case b: java.lang.Boolean =>
- literalBoolean(b)
- case s: java.lang.String =>
- q"$s"
- case c: java.lang.Character =>
- literalCharacter(c)
- case s: clj.Symbol =>
- literalCljSymbol(s, stk)
- case k: clj.Keyword =>
- literalCljKeyword(k)
- case l: java.lang.Long =>
- literalLong(l)
- case d: java.lang.Double =>
- literalDouble(d)
- case d: java.math.BigDecimal =>
- literalBigDecimal(d)
- case i: clj.BigInt =>
- literalCljBigInt(i)
- case r: clj.Ratio =>
- literalCljRatio(r)
- case coll: clj.PersistentVector =>
- literalVector(coll, stk)
- case coll: clj.PersistentList =>
- literalList(coll, stk)
- case coll: clj.IPersistentMap =>
- literalMap(coll, stk)
- case coll: clj.PersistentHashSet =>
- literalSet(coll, stk)
- case x =>
- if (x == null)
- abortWithMessage("nil is not supported")
- else
- abortWithMessage(s"unexpected value $x with ${x.getClass}")
- }
-
-
- def literalBoolean(b: jl.Boolean): c.Tree =
- q"new _root_.java.lang.Boolean(${b.booleanValue})"
-
-
- def literalCljSymbol(s: clj.Symbol, stk: mutable.Stack[c.Tree]): c.Tree = {
- val m = s.meta
- if (m == null) {
- if (s.getName() == "!")
- try {
- val t = stk.pop()
- if (t.tpe =:= typeOf[String]) {
- q"""_root_.datomic.Util.read("\"%s\"".format($t))"""
- } else {
- q"_root_.datomic.Util.read($t.toString)"
- }
- } catch {
- case ex: NoSuchElementException =>
- abortWithMessage("The symbol '!' is reserved by Datomisca")
- }
- else
- q"_root_.clojure.lang.Symbol.intern(${s.getNamespace()}, ${s.getName()})"
- } else {
- val metaT = literalMap(m, stk)
- q"_root_.clojure.lang.Symbol.intern(${s.getNamespace()}, ${s.getName()}).withMeta($metaT).asInstanceOf[clojure.lang.Symbol]"
- }
- }
-
-
- def literalCljKeyword(k: clj.Keyword): c.Tree =
- q"_root_.clojure.lang.Keyword.intern(${k.getNamespace()}, ${k.getName()})"
-
-
- def literalLong(l: jl.Long): c.Tree =
- q"new _root_.java.lang.Long(${l.longValue})"
-
-
- def literalDouble(d: jl.Double): c.Tree =
- q"new _root_.java.lang.Double(${d.doubleValue})"
-
-
- def literalCljBigInt(k: clj.BigInt): c.Tree =
- q"_root_.clojure.lang.BigInt.fromBigInteger(new _root_._root_.java.math.BigInteger(${k.toString}))"
-
-
- def literalCljRatio(r: clj.Ratio): c.Tree =
- q"new _root_.clojure.lang.Ratio(new _root_.java.math.BigInteger(${r.numerator.toString}), new _root_.java.math.BigInteger(${r.denominator.toString}))"
-
-
- def literalBigDecimal(d: jm.BigDecimal): c.Tree =
- q"new _root_.java.math.BigDecimal(${d.toString})"
-
-
- def literalCharacter(char: jl.Character): c.Tree =
- q"_root_.java.lang.Character.valueOf(${char.charValue()})"
-
-
- def literalVector(coll: clj.PersistentVector, stk: mutable.Stack[c.Tree]): c.Tree = {
- val args = coll.iterator.asScala.map(literalEDN(_, stk)).toList
- q"_root_.clojure.lang.PersistentVector.create(_root_.java.util.Arrays.asList(..$args))"
- }
-
-
- def literalList(coll: clj.PersistentList, stk: mutable.Stack[c.Tree]): c.Tree = {
- val args = coll.iterator.asScala.map(literalEDN(_, stk)).toList
- q"_root_.clojure.lang.PersistentList.create(_root_.java.util.Arrays.asList(..$args))"
- }
-
- def literalMap(coll: clj.IPersistentMap, stk: mutable.Stack[c.Tree]): c.Tree = {
- val freshName = newTermName(c.fresh("map$"))
- val builder = List.newBuilder[c.Tree]
- builder += q"val $freshName = new _root_.java.util.HashMap[AnyRef, AnyRef](${coll.count()})"
- for (o <- coll.iterator.asScala) {
- val e = o.asInstanceOf[clj.MapEntry]
- val keyT = literalEDN(e.key(), stk)
- val valT = literalEDN(e.`val`(), stk)
- builder += q"${freshName}.put($keyT, $valT)"
- }
- builder += q"_root_.clojure.lang.PersistentArrayMap.create($freshName)"
- q"{ ..${builder.result} }"
- }
-
-
- def literalSet(coll: clj.PersistentHashSet, stk: mutable.Stack[c.Tree]): c.Tree = {
- val args = coll.iterator.asScala.map(literalEDN(_, stk)).toList
- q"_root_.clojure.lang.PersistentHashSet.create(java.util.Arrays.asList(..$args))"
- }
-
-
- def literalQueryRules(rules: c.Tree): c.Expr[QueryRules] =
- c.Expr[QueryRules](q"new _root_.datomisca.QueryRules($rules)")
-
- def literalQuery(query: c.Tree, inputSize: Int, outputSize: Int): c.Expr[AbstractQuery] = {
- val typeArgs =
- List.fill(inputSize)(tq"AnyRef") :+
- (outputSize match {
- case 0 => tq"Unit"
- case 1 => tq"Any"
- case n =>
- val typeName = newTypeName("Tuple" + n)
- val args = List.fill(n)(tq"Any")
- tq"$typeName[..$args]"
- })
- val queryClassName =
- Select(
- Select(
- Select(
- Ident(newTermName("_root_")),
- newTermName("datomisca")),
- newTermName("gen")),
- newTypeName("TypedQuery" + inputSize))
-
- c.Expr[AbstractQuery](q"new $queryClassName[..$typeArgs]($query)")
- }
-
-}
diff --git a/macros/src/main/scala_2.10/datomisca/macros/MacroImpl.scala b/macros/src/main/scala_2.10/datomisca/macros/MacroImpl.scala
deleted file mode 100644
index f95adeba..00000000
--- a/macros/src/main/scala_2.10/datomisca/macros/MacroImpl.scala
+++ /dev/null
@@ -1,222 +0,0 @@
-/*
- * Copyright 2012 Pellucid and Zenexity
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package datomisca
-package macros
-
-import scala.language.experimental.macros
-import scala.reflect.macros.Context
-
-import scala.collection.JavaConverters._
-import scala.collection.mutable
-
-import clojure.lang.Keyword
-import clojure.{lang => clj}
-
-
-private[datomisca] object MacroImpl {
-
- // class loader hack to get Clojure to initialize
- private def withClojure[T](block: => T): T = {
- val t = Thread.currentThread()
- val cl = t.getContextClassLoader
- t.setContextClassLoader(this.getClass.getClassLoader)
- try block finally t.setContextClassLoader(cl)
- }
-
-
- private def abortWithMessage(c: Context, message: String) =
- c.abort(c.enclosingPosition, message)
-
-
- private def abortWithThrowable(c: Context, throwable: Throwable) =
- c.abort(c.enclosingPosition, throwable.getMessage)
-
-
- private def readEDN(c: Context, edn: String): AnyRef =
- try {
- withClojure { datomic.Util.read(edn) }
- } catch {
- case ex: RuntimeException =>
- abortWithThrowable(c, ex)
- }
-
-
- def KWImpl(c: Context)(str: c.Expr[String]): c.Expr[Keyword] = {
- import c.universe._
-
- str.tree match {
- case Literal(Constant(s: String)) =>
- readEDN(c, s) match {
- case kw: Keyword =>
- val helper = new Helper[c.type](c)
- c.Expr[Keyword](helper.literalCljKeyword(kw))
- case _ =>
- abortWithMessage(c, "Not a valid Clojure keyword")
- }
- case _ =>
- abortWithMessage(c, "Expected a string literal")
- }
- }
-
-
- def cljRulesImpl(c: Context)(edn: c.Expr[String]): c.Expr[QueryRules] = {
- import c.universe._
-
- edn.tree match {
- case Literal(Constant(s: String)) =>
- val edn = readEDN(c, s)
- validateCljRules(c, edn)
- val helper = new Helper[c.type](c)
- helper.literalQueryRules(helper.literalEDN(edn))
-
- case q"scala.StringContext.apply(..$parts).s(..$args)" =>
- val partsWithPlaceholders = q"""Seq(..$parts).mkString(" ! ")"""
- val strWithPlaceHolders = c.eval(c.Expr[String](c.resetLocalAttrs(partsWithPlaceholders.duplicate)))
- val edn = readEDN(c, strWithPlaceHolders)
- validateCljRules(c, edn)
- val argsStack = mutable.Stack.concat(args)
- val helper = new Helper[c.type](c)
- helper.literalQueryRules(helper.literalEDN(edn, argsStack))
-
- case _ =>
- abortWithMessage(c, "Expected a string literal")
- }
- }
-
-
- private def validateCljRules(c: Context, edn: AnyRef): Unit =
- edn match {
- case vector: clj.PersistentVector =>
- vector.iterator.asScala foreach {
- case vector: clj.PersistentVector =>
- if (vector.count == 0) abortWithMessage(c, "Expected a rule as a non-empty vector of clauses, found an empty rule")
- vector.iterator.asScala foreach { x =>
- if (x.isInstanceOf[clj.IPersistentVector] || x.isInstanceOf[clj.IPersistentList])
- if (x.asInstanceOf[clj.IPersistentCollection].count > 0)
- ()
- else
- abortWithMessage(c, s"Expected a clause as a non-empty vector or list, found an empty clause")
- else
- abortWithMessage(c, s"Expected a clause as a vector or list, found value $x with ${x.getClass}")
- }
- case x =>
- abortWithMessage(c, s"Expected a rule as a vector, found value $x with ${x.getClass}")
- }
- case x =>
- abortWithMessage(c, s"Expected a vector of rules, found value $x with ${x.getClass}")
- }
-
-
- def cljQueryImpl(c: Context)(edn: c.Expr[String]): c.Expr[AbstractQuery] = {
- import c.universe._
-
- edn.tree match {
- case Literal(Constant(s: String)) =>
- val edn = readEDN(c, s)
-
- val (query, inputSize, outputSize) = validateDatalog(c, edn)
- val helper = new Helper[c.type](c)
- helper.literalQuery(helper.literalEDN(query), inputSize, outputSize)
-
- case q"scala.StringContext.apply(..$parts).s(..$args)" =>
- val partsWithPlaceholders = q"""Seq(..$parts).mkString(" ! ")"""
- val strWithPlaceHolders = c.eval(c.Expr[String](c.resetLocalAttrs(partsWithPlaceholders.duplicate)))
- val edn = readEDN(c, strWithPlaceHolders)
- val argsStack = mutable.Stack.concat(args)
- val (query, inputSize, outputSize) = validateDatalog(c, edn)
- val helper = new Helper[c.type](c)
- val t = helper.literalEDN(query, argsStack)
- helper.literalQuery(t, inputSize, outputSize)
-
- case t =>
- abortWithMessage(c, "Expected a string literal")
- }
- }
-
-
- private def validateDatalog(c: Context, edn: AnyRef): (AnyRef, Int, Int) = {
- val query = edn match {
- case coll: clj.IPersistentMap =>
- coll
- case coll: clj.PersistentVector =>
- val iter = coll.iterator.asScala.asInstanceOf[Iterator[AnyRef]]
- transformQuery(c, iter)
- case _ =>
- abortWithMessage(c, "Expected a datalog query represented as either a map or a vector")
- }
-
- val outputSize = Option {
- query.valAt(clj.Keyword.intern(null, "find"))
- } map { findClause =>
- findClause.asInstanceOf[clj.IPersistentVector].length
- } getOrElse { abortWithMessage(c, "The :find clause is empty") }
- val inputSize = Option {
- query.valAt(clj.Keyword.intern(null, "in"))
- } map { inClause =>
- inClause.asInstanceOf[clj.IPersistentVector].length
- } getOrElse 0
-
- (query, inputSize, outputSize)
- }
-
-
- private def transformQuery(c: Context, iter: Iterator[AnyRef]): clj.IPersistentMap = {
- def isQueryKeyword(kw: clj.Keyword): Boolean = {
- val name = kw.getName
- (name == "find") || (name == "with") || (name == "in") || (name == "where")
- }
- var currKW: clj.Keyword =
- if (iter.hasNext)
- iter.next() match {
- case kw: clj.Keyword if isQueryKeyword(kw) =>
- kw
- case x =>
- abortWithMessage(c, s"Expected a query clause, found value $x with ${x.getClass}")
- }
- else
- abortWithMessage(c, "Expected a non-empty vector")
-
- val map = new clj.PersistentArrayMap(Array.empty).asTransient()
- while (iter.hasNext) {
- val clauseKW = currKW
- val buf = mutable.Buffer.empty[AnyRef]
- var shouldContinue = true
-
- while (shouldContinue && iter.hasNext) {
- iter.next() match {
- case kw: clj.Keyword =>
- if (isQueryKeyword(kw)) {
- currKW = kw
- shouldContinue = false
- } else
- abortWithMessage(c, s"Unexpected keyword $kw in datalog query")
-
- case o =>
- buf += o
- }
- }
-
- if (buf.isEmpty)
- abortWithMessage(c, s"The $clauseKW clause is empty")
-
- map.assoc(clauseKW, clj.PersistentVector.create(buf.asJava))
- }
-
- map.persistent()
- }
-
-}
diff --git a/project/CustomShellPrompt.scala b/project/CustomShellPrompt.scala
deleted file mode 100644
index 19356b43..00000000
--- a/project/CustomShellPrompt.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-import sbt._
-import Keys._
-
-object CustomShellPrompt {
-
- val Branch = """refs/heads/(.*)\s""".r
-
- def gitBranchOrSha =
- (Process("git symbolic-ref HEAD") #|| Process("git rev-parse --short HEAD")).!! match {
- case Branch(name) => name
- case sha => sha.stripLineEnd
- }
-
- val customPrompt = { state: State =>
-
- val extracted = Project.extract(state)
- import extracted._
-
- (name in currentRef get structure.data) map { name =>
- "[" + scala.Console.CYAN + name + scala.Console.RESET + "] " +
- scala.Console.BLUE + "git:(" +
- scala.Console.RED + gitBranchOrSha +
- scala.Console.BLUE + ")" +
- scala.Console.RESET + " $ "
- } getOrElse ("> ")
-
- }
-}
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
deleted file mode 100644
index 32ecf06d..00000000
--- a/project/Dependencies.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-import sbt._
-
-object Dependencies {
-
- object V {
- val macroParadise = "2.0.1"
-
- val datomic = "0.9.5130"
-
- val specs2 = "2.3.12"
- val scalaTest = "2.2.4"
- }
-
- object Compile {
- val datomic = "com.datomic" % "datomic-free" % V.datomic % "provided" exclude("org.slf4j", "slf4j-nop")
- }
-
- object Test {
- val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
- }
-
- object IntegrationTest {
- val scalaTest = "org.scalatest" %% "scalatest" % V.scalaTest % "it"
- }
-
-}
diff --git a/project/MacroSettings.scala b/project/MacroSettings.scala
deleted file mode 100644
index f272c9e1..00000000
--- a/project/MacroSettings.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import sbt._
-import Keys._
-
-object MacroSettings {
-
- val settings = Seq(
- addCompilerPlugin("org.scalamacros" % "paradise" % Dependencies.V.macroParadise cross CrossVersion.full),
-
- libraryDependencies <+= (scalaVersion)("org.scala-lang" % "scala-reflect" % _),
-
- libraryDependencies ++= (
- if (scalaVersion.value.startsWith("2.10")) List("org.scalamacros" %% "quasiquotes" % Dependencies.V.macroParadise)
- else Nil
- )
- )
-}
diff --git a/project/build.properties b/project/build.properties
index 748703f7..27e88aa1 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=0.13.7
+sbt.version=0.13.13
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 59512779..18cb6d0b 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,6 +1,7 @@
-
-addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.2")
-
-addSbtPlugin("me.lessis" % "bintray-sbt" % "0.1.2")
-
-addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
+addSbtPlugin("com.fortysevendeg" % "sbt-microsites" % "0.3.3")
+addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.3")
+addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")
+addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.1")
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.17")
+addSbtPlugin("com.eed3si9n" % "sbt-doge" % "0.1.5")
+addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.3")
diff --git a/samples/accounts/build.sbt b/samples/accounts/build.sbt
index 90013562..f8b54e5d 100755
--- a/samples/accounts/build.sbt
+++ b/samples/accounts/build.sbt
@@ -2,18 +2,16 @@ name := "datomisca-accounts-sample"
organization := "pellucidanalytics"
-version := "0.7-SNAPSHOT"
+version := "0.1"
-scalaVersion := "2.10.2"
+scalaVersion := "2.11.8"
+crossScalaVersions := Seq("2.11.8","2.12.1")
fork in test := true
-resolvers ++= Seq(
- "Pellucid Bintray" at "http://dl.bintray.com/content/pellucid/maven",
- "clojars" at "https://clojars.org/repo"
-)
+resolvers += "clojars" at "https://clojars.org/repo"
libraryDependencies ++= Seq(
- "com.pellucid" %% "datomisca" % "0.7-SNAPSHOT",
- "com.datomic" % "datomic-free" % "0.8.4260"
+ "llc.flyingwalrus" %% "datomisca-core" % "0.7.1-SNAPSHOT",
+ "com.datomic" % "datomic-free" % "0.9.5544"
)
diff --git a/samples/getting-started/build.sbt b/samples/getting-started/build.sbt
index e88c96aa..a9093534 100755
--- a/samples/getting-started/build.sbt
+++ b/samples/getting-started/build.sbt
@@ -1,17 +1,13 @@
name := "datomisca-getting-started"
-organization := "pellucidanalytics"
+version := "0.1"
-version := "0.7-SNAPSHOT"
+scalaVersion := "2.11.8"
+crossScalaVersions := Seq("2.11.8","2.12.1")
-scalaVersion := "2.10.2"
-
-resolvers ++= Seq(
- "Pellucid Bintray" at "http://dl.bintray.com/content/pellucid/maven",
- "clojars" at "https://clojars.org/repo"
-)
+resolvers += "clojars" at "https://clojars.org/repo"
libraryDependencies ++= Seq(
- "com.pellucid" %% "datomisca" % "0.7-SNAPSHOT",
- "com.datomic" % "datomic-free" % "0.8.4260"
+ "llc.flyingwalrus" %% "datomisca-core" % "0.7.1-SNAPSHOT",
+ "com.datomic" % "datomic-free" % "0.9.5544"
)
diff --git a/samples/getting-started/src/main/scala/GettingStarted.scala b/samples/getting-started/src/main/scala/GettingStarted.scala
index b2c2760d..5c9888c5 100644
--- a/samples/getting-started/src/main/scala/GettingStarted.scala
+++ b/samples/getting-started/src/main/scala/GettingStarted.scala
@@ -132,7 +132,7 @@ object GettingStarted {
+= (PersonSchema.name -> "John")
+= (PersonSchema.age -> 31)
+= (PersonSchema.birth -> new JDate)
- += (PersonSchema.interests -> Set(PersonSchema.sports, PersonSchema.travel))
+ ++= (PersonSchema.interests -> Set(PersonSchema.sports, PersonSchema.travel))
) withId johnId
// tranasact the transaction data for the jane and john entities
diff --git a/samples/movie-graph/build.sbt b/samples/movie-graph/build.sbt
index 3330d284..7d537e3e 100644
--- a/samples/movie-graph/build.sbt
+++ b/samples/movie-graph/build.sbt
@@ -1,17 +1,13 @@
name := "datomisca-movie-graph"
-organization := "pellucidanalytics"
+version := "0.1"
-version := "0.7-SNAPSHOT"
+scalaVersion := "2.11.8"
+crossScalaVersions := Seq("2.11.8","2.12.1")
-scalaVersion := "2.10.2"
-
-resolvers ++= Seq(
- "Pellucid Bintray" at "http://dl.bintray.com/content/pellucid/maven",
- "clojars" at "https://clojars.org/repo"
-)
+resolvers += "clojars" at "https://clojars.org/repo"
libraryDependencies ++= Seq(
- "com.pellucid" %% "datomisca" % "0.7-SNAPSHOT",
- "com.datomic" % "datomic-free" % "0.8.4260"
+ "llc.flyingwalrus" %% "datomisca-core" % "0.7.1-SNAPSHOT",
+ "com.datomic" % "datomic-free" % "0.9.5544"
)
diff --git a/samples/simple-sample/build.sbt b/samples/simple-sample/build.sbt
index dfc684de..2d307548 100755
--- a/samples/simple-sample/build.sbt
+++ b/samples/simple-sample/build.sbt
@@ -1,19 +1,13 @@
name := "datomisca-simple-sample"
-organization := "pellucidanalytics"
+version := "0.1"
-version := "0.7-SNAPSHOT"
+scalaVersion := "2.11.8"
+crossScalaVersions := Seq("2.11.8","2.12.1")
-scalaVersion := "2.10.2"
-
-fork in test := true
-
-resolvers ++= Seq(
- "Pellucid Bintray" at "http://dl.bintray.com/content/pellucid/maven",
- "clojars" at "https://clojars.org/repo"
-)
+resolvers += "clojars" at "https://clojars.org/repo"
libraryDependencies ++= Seq(
- "com.pellucid" %% "datomisca" % "0.7-SNAPSHOT",
- "com.datomic" % "datomic-free" % "0.8.4260"
+ "llc.flyingwalrus" %% "datomisca-core" % "0.7.1-SNAPSHOT",
+ "com.datomic" % "datomic-free" % "0.9.5544"
)
diff --git a/scaladoc.sbt b/scaladoc.sbt
deleted file mode 100644
index e46bd7e1..00000000
--- a/scaladoc.sbt
+++ /dev/null
@@ -1,54 +0,0 @@
-import sbtunidoc.Plugin._
-import sbtunidoc.Plugin.UnidocKeys._
-import scala.util.matching.Regex.Match
-
-
-// substitue unidoc as the way to generate documentation
-unidocSettings
-
-packageDoc in Compile <<= packageDoc in ScalaUnidoc
-
-artifact in (ScalaUnidoc, packageDoc) := {
- val previous: Artifact = (artifact in (ScalaUnidoc, packageDoc)).value
- previous.copy(classifier = Some("javadoc"))
-}
-
-scalacOptions in (Compile, doc) ++=
- Seq(
- "-implicits",
- "-sourcepath", baseDirectory.value.getAbsolutePath,
- "-doc-source-url", s"https://github.com/dwhjames/datomisca/tree/v${version.value}€{FILE_PATH}.scala")
-
-
-autoAPIMappings := true
-
-apiURL := Some(url("https://dwhjames.github.io/datomisca/api/current/"))
-
-apiMappings += {
- val jarFiles = (dependencyClasspath in Compile).value.files
- def findJarFile(s: String) = jarFiles.find(file => file.toString.contains(s)).get
- val datomicJarFile = findJarFile("com.datomic/datomic-free")
- (datomicJarFile -> url("http://docs.datomic.com/javadoc/"))
-}
-
-lazy val transformJavaDocLinksTask = taskKey[Unit](
- "Transform JavaDoc links - replace #java.io.File with ?java/io/File.html"
-)
-
-transformJavaDocLinksTask := {
- val log = streams.value.log
- log.info("Transforming JavaDoc links")
- val t = (target in unidoc).value
- (t ** "*.html").get.filter(hasJavadocApiLink).foreach { f =>
- log.info("Transforming " + f)
- val newContent = javadocApiLink.replaceAllIn(IO.read(f), m =>
- "href=\"" + m.group(1) + "?" + m.group(2).replace(".", "/") + ".html")
- IO.write(f, newContent)
- }
-}
-
-val javadocApiLink = """href=\"(http://docs\.datomic\.com/javadoc/index\.html)#([^"]*)""".r
-
-def hasJavadocApiLink(f: File): Boolean = (javadocApiLink findFirstIn IO.read(f)).nonEmpty
-
-transformJavaDocLinksTask <<= transformJavaDocLinksTask triggeredBy (unidoc in Compile)
diff --git a/tests/build.sbt b/tests/build.sbt
deleted file mode 100644
index bc65efd2..00000000
--- a/tests/build.sbt
+++ /dev/null
@@ -1,10 +0,0 @@
-
-name := "datomisca-tests"
-
-libraryDependencies += Dependencies.Compile.datomic
-
-libraryDependencies += Dependencies.Test.specs2
-
-fork in Test := true
-
-publishArtifact := false
diff --git a/tests/src/test/scala/datomisca/DatomicMapping2Spec.scala b/tests/src/test/scala/datomisca/DatomicMapping2Spec.scala
index 1342d0ba..474741bd 100644
--- a/tests/src/test/scala/datomisca/DatomicMapping2Spec.scala
+++ b/tests/src/test/scala/datomisca/DatomicMapping2Spec.scala
@@ -255,7 +255,6 @@ class DatomicMapping2Spec extends Specification {
}
"read case class with ID" in {
- import scala.util.{Try, Success, Failure}
implicit val conn = Datomic.connect(uri)
@@ -313,7 +312,6 @@ class DatomicMapping2Spec extends Specification {
}
"get entity fields from attributes" in {
- import scala.util.{Try, Success, Failure}
implicit val conn = Datomic.connect(uri)
diff --git a/tests/src/test/scala/datomisca/DatomicMappingSpec.scala b/tests/src/test/scala/datomisca/DatomicMappingSpec.scala
index bbd4f12e..18a90bb6 100644
--- a/tests/src/test/scala/datomisca/DatomicMappingSpec.scala
+++ b/tests/src/test/scala/datomisca/DatomicMappingSpec.scala
@@ -222,7 +222,6 @@ class DatomicMappingSpec extends Specification {
}
"get entity fields from attributes" in {
- import scala.util.{Try, Success, Failure}
implicit val conn = Datomic.connect(uri)
diff --git a/tests/src/test/scala/datomisca/DatomicQuery2Spec.scala b/tests/src/test/scala/datomisca/DatomicQuery2Spec.scala
index af6d1a44..8616d784 100644
--- a/tests/src/test/scala/datomisca/DatomicQuery2Spec.scala
+++ b/tests/src/test/scala/datomisca/DatomicQuery2Spec.scala
@@ -18,10 +18,9 @@ package datomisca
import org.specs2.mutable._
-import org.specs2.specification.{Step, Fragments}
+import org.specs2.specification.core.Fragments
import scala.concurrent._
-import ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
@@ -47,7 +46,7 @@ class DatomicQuery2Spec extends Specification {
println("Deleted DB")
}
- override def map(fs: => Fragments) = Step(startDB) ^ fs ^ Step(stopDB)
+ override def map(fs: => Fragments) = step(startDB) ^ fs ^ step(stopDB)
"Datomic" should {
"1 - pure query" in {
diff --git a/tests/src/test/scala/datomisca/DatomicQuerySpec.scala b/tests/src/test/scala/datomisca/DatomicQuerySpec.scala
index e97200fe..52ff74de 100644
--- a/tests/src/test/scala/datomisca/DatomicQuerySpec.scala
+++ b/tests/src/test/scala/datomisca/DatomicQuerySpec.scala
@@ -18,10 +18,9 @@ package datomisca
import org.specs2.mutable._
-import org.specs2.specification.{Step, Fragments}
+import org.specs2.specification.core.Fragments
import scala.concurrent._
-import ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
@@ -47,7 +46,7 @@ class DatomicQuerySpec extends Specification {
println("Deleted DB")
}
- override def map(fs: => Fragments) = Step(startDB) ^ fs ^ Step(stopDB)
+ override def map(fs: => Fragments) = step(startDB) ^ fs ^ step(stopDB)
"Datomic" should {
"1 - pure query" in {
diff --git a/tests/src/test/scala/datomisca/DatomicTxSpec.scala b/tests/src/test/scala/datomisca/DatomicTxSpec.scala
index 550e1490..01fbb6be 100644
--- a/tests/src/test/scala/datomisca/DatomicTxSpec.scala
+++ b/tests/src/test/scala/datomisca/DatomicTxSpec.scala
@@ -20,7 +20,7 @@ import DatomicMapping._
import org.specs2.mutable._
-import org.specs2.specification.{Step, Fragments}
+import org.specs2.specification.core.Fragments
import scala.concurrent._
import ExecutionContext.Implicits.global
@@ -116,7 +116,7 @@ class DatomicTxSpec extends Specification {
println("Deleted DB")
}
- override def map(fs: => Fragments) = Step(startDB) ^ fs ^ Step(stopDB)
+ override def map(fs: => Fragments) = step(startDB) ^ fs ^ step(stopDB)
"Datomic Entity Mappings" should {
"1 - map simple entity" in {
diff --git a/version.sbt b/version.sbt
new file mode 100644
index 00000000..1938a72a
--- /dev/null
+++ b/version.sbt
@@ -0,0 +1 @@
+version in ThisBuild := "0.7.1"