Commit 0b43812

Remove unused functions
1 parent: 7a1c07f

2 files changed (+2 lines, -21 lines)

app/backend/prepdocslib/filestrategy.py

Lines changed: 1 addition & 20 deletions
```diff
@@ -26,7 +26,7 @@ async def parse_file(
     pages = [page async for page in processor.parser.parse(content=file.content)]
     logger.info("Splitting '%s' into sections", file.filename())
     if image_embeddings:
-        logger.info("Each page will be split into smaller chunks of text, but images will be of the entire page.")
+        logger.warning("Each page will be split into smaller chunks of text, but images will be of the entire page.")
     sections = [
         Section(split_page, content=file, category=category) for split_page in processor.splitter.split_pages(pages)
     ]
@@ -121,25 +121,6 @@ async def run(self):
             await self.blob_manager.remove_blob()
             await search_manager.remove_content()
 
-    async def process_file(self, file, search_manager):
-        try:
-            sections = await parse_file(file, self.file_processors, self.category, self.image_embeddings)
-            if sections:
-                blob_sas_uris = await self.blob_manager.upload_blob(file)
-                blob_image_embeddings: Optional[List[List[float]]] = None
-                if self.image_embeddings and blob_sas_uris:
-                    blob_image_embeddings = await self.image_embeddings.create_embeddings(blob_sas_uris)
-                await search_manager.update_content(
-                    sections=sections, file=file, image_embeddings=blob_image_embeddings
-                )
-        finally:
-            if file:
-                file.close()
-
-    async def remove_file(self, path, search_manager):
-        await self.blob_manager.remove_blob(path)
-        await search_manager.remove_content(path)
-
 
 class UploadUserFileStrategy:
     """
```

app/backend/prepdocslib/htmlparser.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -38,7 +38,7 @@ async def parse(self, content: IO) -> AsyncGenerator[Page, None]:
         Returns:
             Page: The parsed html Page.
         """
-        logger.debug("Extracting text from '%s' using local HTML parser (BeautifulSoup)", content.name)
+        logger.info("Extracting text from '%s' using local HTML parser (BeautifulSoup)", content.name)
 
         data = content.read()
         soup = BeautifulSoup(data, "html.parser")
```
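For reference, the local HTML parsing path shown above relies on BeautifulSoup with its built-in `html.parser` backend. A self-contained sketch of that call (the sample markup is made up, and the repo's `parse` method may do further cleanup before yielding `Page` objects):

```python
from bs4 import BeautifulSoup  # pip install beautifulsoup4

# Mirror the call in the diff: build a tree with the stdlib-backed
# "html.parser" and flatten it to its visible text.
html = "<html><body><h1>Title</h1><p>Some text.</p></body></html>"
soup = BeautifulSoup(html, "html.parser")
print(soup.get_text(separator=" ", strip=True))  # -> "Title Some text."
```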
