
Commit 3aea391

Merge pull request #3 from lighthouse-web3/v0.1.2
V0.1.2 - File Download Feature
2 parents: 79e61aa + b1a2bc5

File tree: 8 files changed (+177 additions, -19 deletions)


.gitignore

Lines changed: 4 additions & 1 deletion
@@ -158,4 +158,7 @@ cython_debug/
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
+#.idea/
+src/lighthouseweb3/functions/download.py
+
+image.png

setup.py

Lines changed: 1 addition & 0 deletions
@@ -24,6 +24,7 @@
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
     ],
     keywords="lighthouse storage sdk python filecoin ipfs web3 perpetual",
     long_description=(

src/lighthouseweb3/__init__.py

Lines changed: 65 additions & 5 deletions
@@ -3,7 +3,7 @@
 import os
 import io
 from typing import List
-from .functions import upload as d, types as t, deal_status, get_uploads as getUploads
+from .functions import upload as d, types as t, deal_status, get_uploads as getUploads, download as _download
 
 
 class Lighthouse:
@@ -14,19 +14,19 @@ def __init__(self, token: str = ""):
                 "No token provided: Please provide a token or set the LIGHTHOUSE_TOKEN environment variable"
             )
 
-    def upload(self, source: str) -> t.Upload:
+    def upload(self, source: str, tag: str = '') -> t.Upload:
         """
         Upload a file or directory to the Lighthouse.
 
         :param source: str, path to file or directory
         :return: t.Upload, the upload result
         """
         try:
-            return d.upload(source, self.token)
+            return d.upload(source, self.token, tag)
         except Exception as e:
             raise e
 
-    def uploadBlob(self, source: io.BufferedReader, filename: str) -> t.Upload:
+    def uploadBlob(self, source: io.BufferedReader, filename: str, tag: str = '') -> t.Upload:
         """
         Upload Blob a file or directory to the Lighthouse.
 
@@ -36,7 +36,40 @@ def uploadBlob(self, source: io.BufferedReader, filename: str) -> t.Upload:
         if not (hasattr(source, 'read') and hasattr(source, 'close')):
             raise TypeError("source must have 'read' and 'close' methods")
         try:
-            return d.uploadBlob(source, filename, self.token)
+            return d.uploadBlob(source, filename, self.token, tag)
+        except Exception as e:
+            raise e
+
+    @staticmethod
+    def downloadBlob(dist: io.BufferedWriter, cid: str, chunk_size=1024*1024*10) -> t.Upload:
+        """
+        Download a Blob (file or directory) from the Lighthouse.
+
+        :param dist: BufferedWriter, destination to write the downloaded data
+        :param cid: str, Content Identifier for the data to be downloaded
+        :param chunk_size: int, size of chunks in which the file will be downloaded (default: 10MB)
+        :param useCidAsTag: bool, flag to use CID as a tag (default: False)
+        :return: t.Upload, the download result
+        """
+        if not (hasattr(dist, 'read') and hasattr(dist, 'close')):
+            raise TypeError("source must have 'read' and 'close' methods")
+        try:
+            return _download.download_file_into_writable(cid, dist, chunk_size)
+        except Exception as e:
+            raise e
+
+    @staticmethod
+    def downloadBlob(dist: io.BufferedWriter, cid: str, chunk_size=1024*1024*10) -> t.Upload:
+        """
+        Download Blob a file or directory to the Lighthouse.
+
+        :param source: str, path to file or directory
+        :return: t.Upload, the upload result
+        """
+        if not (hasattr(dist, 'read') and hasattr(dist, 'close')):
+            raise TypeError("source must have 'read' and 'close' methods")
+        try:
+            return _download.download_file_into_writable(cid, dist, chunk_size)
         except Exception as e:
             raise e
 
@@ -66,3 +99,30 @@ def getUploads(publicKey: str, pageNo: int = 1) -> List[t.DealData]:
             return getUploads.get_uploads(publicKey, pageNo)
         except Exception as e:
             raise e
+
+    @staticmethod
+    def download(cid: str) -> bytes:
+        """
+        Download content from the Lighthouse using its Content Identifier (CID).
+
+        :param cid: str, Content Identifier for the data to be downloaded
+        :param useCidAsTag: bool, flag to use CID as a tag (default: False)
+        :return: bytes, the downloaded content
+        """
+        try:
+            return _download.get_file(cid)
+        except Exception as e:
+            raise e
+
+    def getTagged(self, tag: str) -> t.Upload:
+        """
+        Retrieve an upload from the Lighthouse using its tag.
+
+        :param tag: str, tag associated with the file or directory
+        :return: t.Upload, the upload result
+        """
+        try:
+            return _download.getTaggedCid(tag, self.token)
+        except Exception as e:
+            raise e
+
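With this change the Lighthouse class gains download, downloadBlob and getTagged alongside the new tag parameter on the upload paths. A minimal usage sketch, not part of the commit: the import path assumes the installed package, the file path, tag and output name are placeholders, LIGHTHOUSE_TOKEN must be set, and the tuple unpacking of download() mirrors how tests/test_download.py calls it.

    from lighthouseweb3 import Lighthouse

    lh = Lighthouse()  # falls back to the LIGHTHOUSE_TOKEN environment variable

    # Upload a file and register a tag for it in one call.
    upload_res = lh.upload("tests/testdir/testfile.txt", tag="release-notes")
    cid = upload_res["data"]["Hash"]

    # Fetch the raw bytes back; the new test unpacks a second return value as well.
    content, _ = lh.download(cid)

    # Stream a larger object into any writable file object, 10 MB per chunk by default.
    with open("copy.bin", "wb") as out:
        lh.downloadBlob(out, cid)

    # Resolve the tag back to the CID it currently points at.
    tagged = lh.getTagged("release-notes")
    print(tagged["data"]["cid"])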

src/lighthouseweb3/functions/config.py

Lines changed: 1 addition & 0 deletions
@@ -8,3 +8,4 @@ class Config:
     lighthouse_api = 'https://api.lighthouse.storage'
     lighthouse_node = "https://node.lighthouse.storage"
     lighthouse_bls_node = "https://encryption.lighthouse.storage"
+    lighthouse_gateway = "https://gateway.lighthouse.storage/ipfs"
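The new lighthouse_gateway constant points at the public IPFS gateway that the download helpers read from. The new download.py module itself is not shown in this diff, so the following requests-based sketch only illustrates how a gateway URL of this shape is typically consumed; the exact URL layout beyond gateway + "/<cid>" and the chunked streaming are assumptions.

    import requests

    GATEWAY = "https://gateway.lighthouse.storage/ipfs"  # value added to Config above

    def fetch_from_gateway(cid: str, chunk_size: int = 1024 * 1024 * 10) -> bytes:
        """Stream the content behind a CID from the public gateway in fixed-size chunks."""
        with requests.get(f"{GATEWAY}/{cid}", stream=True, timeout=60) as resp:
            resp.raise_for_status()
            return b"".join(resp.iter_content(chunk_size=chunk_size))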

src/lighthouseweb3/functions/upload.py

Lines changed: 23 additions & 5 deletions
@@ -8,7 +8,7 @@
 from . import types as t
 
 
-def upload(source: str | BufferedReader | NamedBufferedReader, token: str) -> t.Upload:
+def upload(source: str | BufferedReader | NamedBufferedReader, token: str, tag: str = "") -> t.Upload:
     """
     Deploy a file or directory to the lighthouse network
     @params {source}: str, path to file or directory
@@ -40,15 +40,24 @@ def upload(source: str | BufferedReader | NamedBufferedReader, token: str) -> t.Upload:
                 file_dict["files"] = [source]
                 file_dict["is_dir"] = False
                 file_dict["path"] = source
-            return {"data": axios.post_files(file_dict, headers)}
+            hashData = axios.post_files(file_dict, headers)
         else:
-            return {"data": axios.post_blob(source, source.name, headers)}
+            hashData = axios.post_blob(source, source.name, headers)
+
+        if len(tag):
+            _axios = Axios(Config.lighthouse_api + "/api/user/create_tag")
+            data = _axios.post({
+                "tag": tag,
+                "cid": hashData.get("Hash")
+            }, {
+                "Authorization": f"Bearer {token}", })
+        return {"data": hashData}
     except Exception as e:
         print(e)
         raise e
 
 
-def uploadBlob(source: BufferedReader, filename: str, token: str) -> t.Upload:
+def uploadBlob(source: BufferedReader, filename: str, token: str, tag: str = "") -> t.Upload:
     """
     Upload a Buffer or readable Object
     @params {source}: str, path to file or directory
@@ -65,7 +74,16 @@ def uploadBlob(source: BufferedReader, filename: str, token: str) -> t.Upload:
         # create http object
         axios = Axios(Config.lighthouse_node + "/api/v0/add")
         # create list of files to upload
-        return {"data": axios.post_blob(source, filename, headers)}
+
+        hashData = axios.post_blob(source, filename, headers)
+        if len(tag):
+            _axios = Axios(Config.lighthouse_api + "/api/user/create_tag")
+            data = _axios.post({
+                "tag": tag,
+                "cid": hashData.get("Hash")
+            }, {
+                "Authorization": f"Bearer {token}", })
+        return {"data": hashData}
     except Exception as e:
         print(e)
         raise e
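Both upload paths now finish by registering the tag with a single authenticated POST to /api/user/create_tag. A standalone sketch of that call using requests, for illustration only: whether the SDK's internal Axios.post sends the payload as JSON or form data is not visible in this diff, so json= is an assumption, and the response shape is treated as opaque.

    import requests

    API_BASE = "https://api.lighthouse.storage"

    def create_tag(token: str, tag: str, cid: str) -> dict:
        """Associate a tag with an already-uploaded CID, mirroring the payload used above."""
        resp = requests.post(
            f"{API_BASE}/api/user/create_tag",
            json={"tag": tag, "cid": cid},
            headers={"Authorization": f"Bearer {token}"},
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json()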

tests/test_deal_status.py

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@ def test_deal_status(self):
         res = Lighthouse.getDealStatus(
             "QmT9shXpKcn4HRbJhXJ1ZywzwjEo2QWbxAx4SVgW4eYKjG")
         self.assertIsInstance(res, list, "data is a list")
-        self.assertNotEqual(res[0].get(
+        self.assertIsInstance(res[0].get(
             "dealId"), int, "dealId is Int")
 
     def test_deal_status_init(self):
@@ -24,5 +24,5 @@ def test_deal_status_init(self):
         res = l.getDealStatus(
             "QmT9shXpKcn4HRbJhXJ1ZywzwjEo2QWbxAx4SVgW4eYKjG")
         self.assertIsInstance(res, list, "data is a list")
-        self.assertNotEqual(res[0].get(
+        self.assertIsInstance(res[0].get(
             "dealId"), int, "dealId is Int")

tests/test_download.py

Lines changed: 48 additions & 0 deletions
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+import os
+import io
+import unittest
+from src.lighthouseweb3 import Lighthouse
+from src.lighthouseweb3.functions.utils import NamedBufferedReader
+from .setup import parse_env
+import string
+import secrets
+
+
+def generate_random_string(length: int) -> str:
+    characters = string.ascii_letters + string.digits
+    return ''.join(secrets.choice(characters) for _ in range(length))
+
+
+class TestDownload(unittest.TestCase):
+    def setUp(self) -> None:
+        """setup test environment"""
+        parse_env()
+
+    def test_env(self):
+        """test env var"""
+        self.assertNotEqual(
+            os.environ.get("LIGHTHOUSE_TOKEN"), None, "token is not None"
+        )
+
+    def test_download_file(self):
+        """test Upload function"""
+        l = Lighthouse()  # will use env var
+        res, _ = l.download(
+            "Qmd5MBBScDUV3Ly8qahXtZFqyRRfYSmUwEcxpYcV4hzKfW")
+        self.assertIsInstance(res, bytes, "type doesn't match")
+        self.assertEqual(res, b'tests/testdir/', "data doesn't match")
+
+    def test_download_blob_file(self):
+        """test download_blob function"""
+        l = Lighthouse(os.environ.get("LIGHTHOUSE_TOKEN"))
+        with open("./image.png", "wb") as file:
+            res = l.downloadBlob(
+                file, "QmPT11PFFQQD3mT6BdwfSHQGHRdF8ngmRmcvxtSBiddWEa", chunk_size=1024*100)
+            self.assertEqual(res.get("data").get("Size"),
+                             123939, "File Size dont match")
+
+
+
+if __name__ == "__main__":
+    unittest.main()

tests/test_upload.py

Lines changed: 33 additions & 6 deletions
@@ -25,22 +25,25 @@ def test_env(self):
             os.environ.get("LIGHTHOUSE_TOKEN"), None, "token is not None"
         )
 
-    def test_Upload_file(self):
+    def test_upload_file(self):
         """test Upload function"""
         l = Lighthouse()  # will use env var
         res = l.upload("tests/testdir/testfile.txt")
         self.assertNotEqual(res.get("data"), None, "data is None")
         self.assertNotEqual(res.get("data").get("Hash"), None, "data is None")
 
-    def test_Upload_dir(self):
+    def test_upload_dir(self):
         """test Upload function"""
         l = Lighthouse(os.environ.get("LIGHTHOUSE_TOKEN"))
-        res = l.upload("tests/testdir/")
+        res = l.upload("tests/")
         self.assertNotEqual(res.get("data"), None, "data is None")
         self.assertIsInstance(res.get("data"), dict, "data is a dict")
-        self.assertNotEqual(res.get("data").get("Hash"), None, "data is None")
+        self.assertIsInstance(res.get("data").get(
+            "Hash"), str, "Instance is not of type String")
+        self.assertIsInstance(res.get("data").get(
+            "Size"), str, "Instance is not of type String")
 
-    def test_Upload_Blob(self):
+    def test_upload_Blob(self):
         """test Upload function"""
         l = Lighthouse(os.environ.get("LIGHTHOUSE_TOKEN"))
         res = l.uploadBlob(
@@ -49,7 +52,7 @@ def test_Upload_Blob(self):
         self.assertIsInstance(res.get("data"), dict, "data is a dict")
         self.assertNotEqual(res.get("data").get("Hash"), None, "data is None")
 
-    def test_Upload_File(self):
+    def test_upload_File(self):
         """test Upload function"""
         l = Lighthouse(os.environ.get("LIGHTHOUSE_TOKEN"))
         with open("./.gitignore", "rb") as file:
@@ -59,6 +62,30 @@ def test_Upload_File(self):
             self.assertNotEqual(res.get("data").get(
                 "Hash"), None, "data is None")
 
+    def test_upload_with_tag(self):
+        """test Upload with tag function"""
+        l = Lighthouse(os.environ.get("LIGHTHOUSE_TOKEN"))
+        tag = generate_random_string(8)
+        res = l.uploadBlob(
+            io.BytesIO(b"tests/testdir/"+generate_random_string(80).encode("utf-8")), f"{generate_random_string(16)}.txt", tag)
+        self.assertNotEqual(res.get("data"), None, "data is None")
+        self.assertIsInstance(res.get("data"), dict, "data is a dict")
+        self.assertIsInstance(res.get("data").get(
+            "Hash"), str, "Hash is not of type string")
+
+        tagData = l.getTagged(tag)
+        self.assertEqual(tag, tagData.get("data").get("tag"), "Tag dont match")
+        self.assertEqual(res.get("data").get("Hash"), tagData.get(
+            "data").get("cid"), "Tag dont match")
+
+        # overWrite tag
+        res = l.uploadBlob(
+            io.BytesIO(b"tests/testdir/"+generate_random_string(80).encode("utf-8")), f"{generate_random_string(16)}.txt", tag)
+        tagData = l.getTagged(tag)
+        self.assertEqual(tag, tagData.get("data").get("tag"), "Tag dont match")
+        self.assertEqual(res.get("data").get("Hash"), tagData.get(
+            "data").get("cid"), "Tag dont match")
+
 
 if __name__ == "__main__":
     unittest.main()
