 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import accounts.FrameworkUserManager;
+import authentication.FrameworkConfiguration;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.hp.hpl.jena.query.Query;
@@ -22,6 +24,8 @@
 import com.hp.hpl.jena.rdf.model.Resource;
 import com.hp.hpl.jena.rdf.model.Statement;
 import com.hp.hpl.jena.rdf.model.StmtIterator;
+import rdf.RdfStoreManager;
+import util.HttpUtils;
 
 
 public class ImportRDF extends HttpServlet {
@@ -40,13 +44,15 @@ public class ImportRDF extends HttpServlet {
     private static String jdbcConnection;
     private static String jdbcUser;
     private static String jdbcPassword;
+    private String username;
+    private String token;
 
     public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
         doPost(request, response);
     }
 
     public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
-
+
         response.setContentType("application/json");
 
         PrintWriter out = response.getWriter();
@@ -69,7 +75,9 @@ public void doPost(HttpServletRequest request, HttpServletResponse response) thr
         rdfFiles = request.getParameter("rdfFiles");
         rdfQuery = request.getParameter("rdfQuery");
         rdfQueryEndpoint = request.getParameter("rdfQueryEndpoint");
-
+        username = request.getParameter("username");
+        token = HttpUtils.getCookieValue(request, "token");
+
 //        boolean useJDBC= false;
 //        System.out.println("rdfUrl " +rdfUrl);
 //        System.out.println("rdfFiles " +rdfFiles);
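The token is pulled from a request cookie via util.HttpUtils.getCookieValue, which is not shown in this diff. As a rough sketch of what such a helper presumably does (an assumption, not the project's actual implementation):

    // Sketch only: return the value of the named cookie, or null if the request carries none.
    public static String getCookieValue(HttpServletRequest request, String name) {
        javax.servlet.http.Cookie[] cookies = request.getCookies(); // null when no cookies were sent
        if (cookies == null) return null;
        for (javax.servlet.http.Cookie cookie : cookies) {
            if (name.equals(cookie.getName())) return cookie.getValue();
        }
        return null;
    }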
@@ -127,7 +135,7 @@ public void doPost(HttpServletRequest request, HttpServletResponse response) thr
 //        ujdbc.loadLocalFile(file, graph);
 //    }
 
-    private static int queryImport(String destEndpoint, String graph, String sourceEndpoint, String sparql) throws Exception {
+    private int queryImport(String destEndpoint, String graph, String sourceEndpoint, String sparql) throws Exception {
         Query query = QueryFactory.create(sparql);
         QueryExecution qexec = QueryExecutionFactory.sparqlService(sourceEndpoint, query);
         Model model = qexec.execConstruct();
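queryImport runs the client-supplied SPARQL against the source endpoint and materializes the result as a Jena Model. Because execConstruct() is used, the servlet's rdfQuery parameter must be a CONSTRUCT query; an illustrative value (not taken from the project) would be:

    // Illustrative only: copy every triple matched at the source endpoint, capped at 1000 solutions.
    String sparql = "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 1000";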
@@ -136,7 +144,13 @@ private static int queryImport(String destEndpoint, String graph, String sourceE
         return inserted;
     }
 
-    private static int httpUpdate(String endpoint, String graph, Model model) throws Exception {
+    private int httpUpdate(String endpoint, String graph, Model model) throws Exception {
+        RdfStoreManager rdfStoreManager = null;
+        if (username != null && !username.isEmpty() && token != null && !token.isEmpty()) {
+            FrameworkUserManager frameworkUserManager = FrameworkConfiguration.getInstance().getFrameworkUserManager();
+            if (frameworkUserManager.checkToken(username, token))
+                rdfStoreManager = frameworkUserManager.getRdfStoreManager(username);
+        }
 
         // generate queries of 100 lines each
         StmtIterator stmts = model.listStatements();
@@ -189,15 +203,22 @@ private static int httpUpdate(String endpoint, String graph, Model model) throws
 
                 ByteArrayOutputStream os = new ByteArrayOutputStream();
                 tmpModel.write(os, "N-TRIPLES");
-                String queryString = "INSERT { " + os.toString() + "}";
-                os.close();
-
-                HttpSPARQLUpdate p = new HttpSPARQLUpdate();
-                p.setEndpoint(endpoint);
-                p.setGraph(graph);
-                p.setUpdateString(queryString);
-
-                if (!p.execute()) throw new Exception("UPDATE/SPARQL failed: " + queryString);
+
+                if (rdfStoreManager != null) {
+                    String queryString = "INSERT DATA { GRAPH <" + graph + "> { " + os.toString() + " } }";
+                    os.close();
+                    rdfStoreManager.execute(queryString, null);
+                } else {
+                    String queryString = "INSERT { " + os.toString() + "}";
+                    os.close();
+
+                    HttpSPARQLUpdate p = new HttpSPARQLUpdate();
+                    p.setEndpoint(endpoint);
+                    p.setGraph(graph);
+                    p.setUpdateString(queryString);
+
+                    if (!p.execute()) throw new Exception("UPDATE/SPARQL failed: " + queryString);
+                }
 
                 total += linesCount;
                 linesCount = 0;
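The same two-way branch is repeated in the next hunk for the final, smaller batch. Since both flush points build identical statements, a shared helper along these lines would keep the logic in one place (a sketch reusing only the fields and classes already visible in this diff; it is not part of the commit):

    // Sketch: send one serialized N-TRIPLES batch either through the user's RdfStoreManager
    // (INSERT DATA into the named graph) or through HttpSPARQLUpdate as before.
    private void insertBatch(RdfStoreManager rdfStoreManager, String endpoint, String graph, String ntriples) throws Exception {
        if (rdfStoreManager != null) {
            rdfStoreManager.execute("INSERT DATA { GRAPH <" + graph + "> { " + ntriples + " } }", null);
        } else {
            String queryString = "INSERT { " + ntriples + " }";
            HttpSPARQLUpdate p = new HttpSPARQLUpdate();
            p.setEndpoint(endpoint);
            p.setGraph(graph);
            p.setUpdateString(queryString);
            if (!p.execute()) throw new Exception("UPDATE/SPARQL failed: " + queryString);
        }
    }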
@@ -210,15 +231,22 @@ private static int httpUpdate(String endpoint, String graph, Model model) throws
 
             ByteArrayOutputStream os = new ByteArrayOutputStream();
             tmpModel.write(os, "N-TRIPLES");
-            String queryString = "INSERT { " + os.toString() + "}";
-            os.close();
 
-            HttpSPARQLUpdate p = new HttpSPARQLUpdate();
-            p.setEndpoint(endpoint);
-            p.setGraph(graph);
-            p.setUpdateString(queryString);
-
-            if (!p.execute()) throw new Exception("UPDATE/SPARQL failed: " + queryString);
+            if (rdfStoreManager != null) {
+                String queryString = "INSERT DATA { GRAPH <" + graph + "> { " + os.toString() + "} }";
+                os.close();
+                rdfStoreManager.execute(queryString, null);
+            } else {
+                String queryString = "INSERT { " + os.toString() + "}";
+                os.close();
+
+                HttpSPARQLUpdate p = new HttpSPARQLUpdate();
+                p.setEndpoint(endpoint);
+                p.setGraph(graph);
+                p.setUpdateString(queryString);
+
+                if (!p.execute()) throw new Exception("UPDATE/SPARQL failed: " + queryString);
+            }
 
             total += linesCount;
 