diff --git a/dataikuapi/dss/wiki.py b/dataikuapi/dss/wiki.py index 4d7423ca..335d1a3b 100644 --- a/dataikuapi/dss/wiki.py +++ b/dataikuapi/dss/wiki.py @@ -89,6 +89,28 @@ def create_article(self, article_name, parent_id=None, content=None): return article + def get_export_stream(self, paper_size="A4", export_attachment=False): + """ + Download the whole wiki of the project in PDF format as a binary stream. + Warning: this stream will monopolize the DSSClient until closed. + """ + body = { + "paperSize": paper_size, + "exportAttachment": export_attachment + } + return self.client._perform_raw("POST", "/projects/%s/wiki/actions/export" % (self.project_key), body=body) + + def export_to_file(self, path, paper_size="A4", export_attachment=False): + """ + Download the whole wiki of the project in PDF format into the given output file. + """ + with self.get_export_stream(paper_size=paper_size, export_attachment=export_attachment) as stream: + with open(path, 'wb') as f: + for chunk in stream.iter_content(chunk_size=10000): + if chunk: + f.write(chunk) + f.flush() + class DSSWikiSettings(object): """ Global settings for the wiki, including taxonomy. Call save() to save @@ -247,6 +269,30 @@ def get_uploaded_file(self, upload_id): """ return self.client._perform_raw("GET", "/projects/%s/wiki/%s/uploads/%s" % (self.project_key, self.article_id, upload_id)) + def get_export_stream(self, paper_size="A4", export_children=False, export_attachment=False): + """ + Download an article in PDF format as a binary stream. + Warning: this stream will monopolize the DSSClient until closed.
+ """ + body = { + "paperSize": paper_size, + "exportChildren": export_children, + "exportAttachment": export_attachment + } + return self.client._perform_raw("POST", "/projects/%s/wiki/%s/actions/export" % (self.project_key, self.article_id), body=body) + + def export_to_file(self, path, paper_size="A4", export_children=False, export_attachment=False): + """ + Download an article in PDF format into the given output file. + """ + with self.get_export_stream(paper_size=paper_size, export_children=export_children, export_attachment=export_attachment) as stream: + with open(path, 'wb') as f: + for chunk in stream.iter_content(chunk_size=10000): + if chunk: + f.write(chunk) + f.flush() + + def delete(self): """ Delete the article