Skip to content

Commit 6418205

Browse files
committed
Generate labels zip folder structure for collection.
1 parent 7fd5386 commit 6418205

File tree

4 files changed

+226
-51
lines changed

4 files changed

+226
-51
lines changed

app.py

Lines changed: 115 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -19,14 +19,13 @@
1919
from core.images import precompute_location_images, resolve_part_image
2020
from core.colors import load_colors, build_color_lookup, render_color_cell
2121
from core.auth import AuthManager
22+
from core.labels import organize_labels_by_location
2223

2324
# ---------------------------------------------------------------------
2425
# --- Page setup
2526
# ---------------------------------------------------------------------
2627
st.set_page_config(page_title="Rebrickable Storage - Parts Finder", layout="wide")
27-
28-
#st.title("🧱 Rebrickable Storage - Parts Finder")
29-
#st.markdown("### Welcome! Please login or register to continue.")
28+
st.title("🧱 Rebrickable Storage - Parts Finder")
3029

3130
# ---------------------------------------------------------------------
3231
# --- Authentication Setup (FULL PATCH)
@@ -40,24 +39,18 @@
4039

4140
auth_manager = st.session_state.auth_manager
4241

43-
# -------------------------------------------------
44-
# 1) Attempt silent cookie login BEFORE any UI
45-
# -------------------------------------------------
42+
# Attempt silent cookie login BEFORE any UI
4643
auth_manager.authenticator.login(
4744
location="unrendered",
4845
max_login_attempts=0 # suppress login form → cookie-only check
4946
)
5047

51-
# -------------------------------------------------
52-
# 2) Read authentication state
53-
# -------------------------------------------------
48+
# Read authentication state
5449
auth_status = st.session_state.get("authentication_status", None)
5550
name = st.session_state.get("name", None)
5651
username = st.session_state.get("username", None)
5752

58-
# -------------------------------------------------
59-
# 3) Evaluate authentication result
60-
# -------------------------------------------------
53+
# Evaluate authentication result
6154
if auth_status is True:
6255
# Authenticated via cookie or fresh login
6356
pass
@@ -68,18 +61,13 @@
6861
st.stop()
6962

7063
else:
71-
# -------------------------------------------------
72-
# 4) No cookie → Show Login + Registration UI
73-
# -------------------------------------------------
74-
st.title("🧱 Rebrickable Storage - Parts Finder")
64+
# No cookie → Show Login + Registration UI
7565
st.markdown("### Welcome! Please login or register to continue.")
7666

7767
tab1, tab2 = st.tabs(["Login", "Register"])
78-
7968
with tab1:
8069
# Render login form (no return value needed)
8170
auth_manager.authenticator.login(location="main")
82-
8371
with tab2:
8472
auth_manager.register_user()
8573

@@ -89,6 +77,10 @@
8977
# 5) Authenticated area
9078
# -------------------------------------------------
9179
if auth_status is True:
80+
# Define user collection directory for use throughout the app
81+
user_collection_dir = paths.user_data_dir / username / "collection"
82+
user_collection_dir.mkdir(parents=True, exist_ok=True)
83+
9284
with st.sidebar:
9385
display_name = st.session_state.get("name", username)
9486
st.write(f"👤 Welcome, **{display_name}**!")
@@ -123,9 +115,6 @@
123115
# Collection default folder
124116
with st.expander("🗂️ Collection default"):
125117
# Will only execute if authenticated → username not None
126-
user_collection_dir = paths.user_data_dir / username / "collection"
127-
user_collection_dir.mkdir(parents=True, exist_ok=True)
128-
129118
uploaded_files_list = st.file_uploader(
130119
"Upload Collection CSVs",
131120
type=["csv"],
@@ -136,7 +125,6 @@
136125
manage_default_collection(user_collection_dir)
137126

138127

139-
140128
# APPLY THEME
141129
st.session_state["theme"] = "dark-enhanced"
142130
# Always apply dark CSS on load
@@ -149,11 +137,9 @@
149137
if st.session_state["theme"] == "dark-enhanced":
150138
apply_dark_theme()
151139

152-
# Set Title
153-
st.title("🧱 Rebrickable Storage - Parts Finder")
154-
155140
# --- Base path resolution (cross-platform)
156141
CACHE_IMAGES_DIR = paths.cache_images
142+
CACHE_LABELS_DIR = paths.cache_labels
157143
RESOURCES_DIR = paths.resources_dir
158144
DEFAULT_COLLECTION_DIR = paths.default_collection_dir # Common collection directory
159145
MAPPING_PATH = paths.mapping_path
@@ -165,9 +151,6 @@
165151
# --- Mapping file
166152
ba_mapping = load_ba_mapping(MAPPING_PATH)
167153

168-
#if st.session_state["ba_mapping"] is None:
169-
# load_ba_mapping(MAPPING_PATH)
170-
171154
# --- Color Lookup
172155
colors_df = load_colors(COLORS_PATH)
173156
color_lookup = build_color_lookup(colors_df)
@@ -178,7 +161,6 @@
178161
# ---------------------------------------------------------------------
179162
# --- File upload section
180163
# ---------------------------------------------------------------------
181-
#col1, col2, col3 = st.columns(3)
182164
col1, col2 = st.columns(2)
183165
with col1:
184166
st.markdown("### 🗂️ Wanted parts: Upload")
@@ -196,29 +178,119 @@
196178
uploaded_collection_files = st.file_uploader("Upload Collection CSVs", type=["csv"], accept_multiple_files=True)
197179

198180
collection_files_stream = []
181+
collection_file_paths = []
182+
183+
# Add selected files from default collection
199184
for f in selected_files:
200-
collection_files_stream.append(open(f, "rb"))
185+
collection_file_paths.append(f)
186+
# Open file handle for streamlit processing
187+
file_handle = open(f, "rb")
188+
collection_files_stream.append(file_handle)
189+
190+
# Add uploaded files
201191
if uploaded_collection_files:
202192
collection_files_stream.extend(uploaded_collection_files)
193+
# Store paths for uploaded files (they're in memory, so we'll handle differently)
194+
for uploaded_file in uploaded_collection_files:
195+
collection_file_paths.append(uploaded_file)
196+
197+
st.markdown("---")
198+
col1, col2 = st.columns(2)
199+
with col1:
200+
# ---------------------------------------------------------------------
201+
# --- Start Wanted Parts Processing Button
202+
# ---------------------------------------------------------------------
203+
if wanted_files and collection_files_stream:
204+
st.markdown("### ▶️ Find wanted parts in collection")
205+
st.markdown("Process the wanted parts and collection lists, create a table with wanted parts per location in collection.")
206+
if st.button("🚀 Start generating pickup list"):
207+
st.session_state["start_processing"] = True
208+
else:
209+
st.info("📤 Upload at least one Wanted and one Collection file to begin.")
210+
st.session_state["start_processing"] = False
211+
212+
with col2:
213+
# ---------------------------------------------------------------------
214+
# --- Labels Organization Section
215+
# ---------------------------------------------------------------------
216+
if collection_files_stream:
217+
st.markdown("### 🏷️ Generate Labels by Location")
218+
st.markdown("Create a downloadable zip file with label images organized by location from your collection files.")
219+
220+
if st.button("📦 Generate Labels Zip File", key="generate_labels"):
221+
with st.spinner("Organizing labels by location..."):
222+
try:
223+
# Prepare collection files for labels generation
224+
# Reset file handles to beginning if they're file objects
225+
labels_collection_stream = []
226+
for f in collection_files_stream:
227+
if hasattr(f, 'seek'):
228+
f.seek(0)
229+
labels_collection_stream.append(f)
230+
231+
# Load collection files for labels generation
232+
collection_for_labels = load_collection_files(labels_collection_stream)
233+
234+
# Generate labels zip
235+
zip_bytes, stats = organize_labels_by_location(
236+
collection_for_labels,
237+
ba_mapping,
238+
CACHE_LABELS_DIR
239+
)
240+
241+
if zip_bytes and stats['locations_count'] > 0:
242+
st.success(f"✅ Successfully generated labels zip file!")
243+
st.info(
244+
f"**Statistics:**\n"
245+
f"- Locations: {stats['locations_count']}\n"
246+
f"- Parts processed: {stats['total_parts_processed']}\n"
247+
f"- Labels copied: {stats['files_copied_count']}\n"
248+
f"- Missing labels: {stats['missing_labels_count']}"
249+
)
250+
251+
if stats['missing_labels_count'] > 0:
252+
with st.expander("⚠️ View missing labels"):
253+
missing_list = stats['missing_labels_list']
254+
st.text("\n".join(missing_list))
255+
if stats['missing_labels_count'] > 20:
256+
st.text(f"... and {stats['missing_labels_count'] - 20} more")
257+
258+
# Store zip bytes in session state for download
259+
st.session_state["labels_zip_bytes"] = zip_bytes
260+
st.session_state["labels_zip_filename"] = f"labels_by_location_{pd.Timestamp.now().strftime('%Y%m%d_%H%M%S')}.zip"
261+
262+
st.rerun()
263+
else:
264+
if stats['locations_count'] == 0:
265+
st.warning("No locations found in collection files. Please ensure your collection files contain 'Location' column with valid location names.")
266+
else:
267+
st.error("Failed to generate zip file. Please check that collection files contain valid data.")
268+
except Exception as e:
269+
st.error(f"Error generating labels: {e}")
270+
import traceback
271+
st.code(traceback.format_exc())
272+
273+
# Display download button if zip file is ready
274+
if st.session_state.get("labels_zip_bytes"):
275+
st.download_button(
276+
"⬇️ Download Labels Zip File",
277+
st.session_state["labels_zip_bytes"],
278+
st.session_state.get("labels_zip_filename", "labels_by_location.zip"),
279+
mime="application/zip",
280+
key="download_labels_zip"
281+
)
282+
if st.button("🗑️ Clear Labels Zip", key="clear_labels_zip"):
283+
st.session_state.pop("labels_zip_bytes", None)
284+
st.session_state.pop("labels_zip_filename", None)
285+
st.rerun()
203286

204-
# ---------------------------------------------------------------------
205-
# --- Start Processing Button
206-
# ---------------------------------------------------------------------
207-
if wanted_files and collection_files_stream:
208-
st.markdown("### ▶️ Ready to process")
209-
if st.button("🚀 Start generating pickup list"):
210-
st.session_state["start_processing"] = True
211-
else:
212-
st.info("📤 Upload at least one Wanted and one Collection file to begin.")
213-
st.session_state["start_processing"] = False
214287

215288
# ---------------------------------------------------------------------
216-
# --- MAIN PROCESSING LOGIC
289+
# --- MAIN WANTED PARTS PROCESSING LOGIC
217290
# ---------------------------------------------------------------------
218291
if st.session_state.get("start_processing"):
219292

220-
with st.spinner("Processing Collection & Wanted parts..."):
221-
293+
with st.spinner("Processing Collection & Wanted parts..."):
222294
try:
223295
wanted = load_wanted_files(wanted_files)
224296
collection = load_collection_files(collection_files_stream)
@@ -243,11 +315,6 @@ def _df_bytes(df):
243315
collection_bytes = _df_bytes(collection)
244316

245317
with st.spinner("Computing image locations..."):
246-
#ba_mapping = load_ba_mapping(MAPPING_PATH)
247-
#ba_mapping = st.session_state.get("ba_mapping")
248-
#if st.session_state["ba_mapping"] is None:
249-
# st.error("Error: BA mapping not present!")
250-
251318
images_index = precompute_location_images(collection_bytes, ba_mapping, CACHE_IMAGES_DIR)
252319
st.session_state["locations_index"] = images_index
253320
st.write("Status: Loaded image locations for parts.")
@@ -272,11 +339,9 @@ def _df_bytes(df):
272339
""", unsafe_allow_html=True)
273340

274341
colA, colB = st.columns([1, 1])
275-
276342
with colA:
277343
if st.button("Open ▼", key=short_key("open", location), help="Show this location", use_container_width=False):
278344
st.session_state["expanded_loc"] = location
279-
280345
with colB:
281346
if st.button("Close ▶", key=short_key("close", location), help="Hide this location", use_container_width=False):
282347
if st.session_state.get("expanded_loc") == location:
128 Bytes
Binary file not shown.

core/labels.py

Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,109 @@
# core/labels.py
import os
import shutil
import pandas as pd
import re
import tempfile
import zipfile
import logging
from collections import defaultdict
from pathlib import Path
from typing import Tuple
from io import BytesIO

logger = logging.getLogger(__name__)


def organize_labels_by_location(
    collection_df: pd.DataFrame,
    ba_mapping: dict,
    labels_source_dir: Path
) -> Tuple[bytes, dict]:
    """
    Organize label files (.lbx) by location based on collection CSV data.

    Each (Part, Location) row is translated from a Rebrickable (RB) part
    number to its BA part number via ``ba_mapping``; the matching
    ``<ba_part>.lbx`` file from ``labels_source_dir`` is copied into a
    ``locations/<sanitized location>/`` folder, and the resulting tree is
    zipped entirely in memory.

    Args:
        collection_df: DataFrame with columns 'Part' (RB part number) and 'Location'
        ba_mapping: Dictionary mapping RB part numbers to BA part numbers
        labels_source_dir: Directory containing label files (.lbx)

    Returns:
        Tuple of (zip_file_bytes, stats_dict) where stats_dict contains:
        - total_parts_processed: int
        - files_copied_count: int
        - locations_count: int
        - missing_labels_count: int
        - missing_labels_list: first 20 missing label filenames (sorted for
          deterministic display)
    """
    # Column names expected in the collection CSVs
    LOC_PART_COL = 'Part'
    LOC_LOCATION_COL = 'Location'

    # Map each location to the distinct set of BA part numbers stored there.
    location_to_ba_parts = defaultdict(set)
    missing_labels = set()
    total_parts_processed = 0
    files_copied_count = 0

    for _, row in collection_df.iterrows():
        # Check for NaN BEFORE stringifying: str(NaN) == 'nan', so a
        # post-conversion pd.isna() check could never fire. The 'nan' string
        # comparison is kept as a belt-and-braces guard for pre-stringified data.
        raw_location = row[LOC_LOCATION_COL]
        if pd.isna(raw_location):
            continue
        location = str(raw_location).strip()
        if not location or location == 'nan':
            continue

        # Map RB part to BA part; unmapped parts are skipped silently since
        # no label file can exist for them.
        ba_part = ba_mapping.get(str(row[LOC_PART_COL]).strip())
        if ba_part:
            location_to_ba_parts[location].add(ba_part)

    # Stage the folder structure in a temporary directory, then zip it.
    with tempfile.TemporaryDirectory() as temp_dir:
        output_base_dir = Path(temp_dir) / 'locations'
        output_base_dir.mkdir(exist_ok=True)

        for location, ba_parts_set in location_to_ba_parts.items():
            # Sanitize the location name for use as a directory name.
            sanitized_location_name = re.sub(r'[\\/|:*?"<>]', '_', location)
            location_dir = output_base_dir / sanitized_location_name
            location_dir.mkdir(parents=True, exist_ok=True)

            for ba_part in ba_parts_set:
                total_parts_processed += 1
                label_filename = f"{ba_part}.lbx"
                source_file_path = labels_source_dir / label_filename

                if source_file_path.exists():
                    try:
                        shutil.copy2(source_file_path, location_dir / label_filename)
                        files_copied_count += 1
                    except OSError:
                        # Best-effort: one unreadable label must not abort the
                        # whole export — log it and surface it as missing so
                        # the user sees it in the stats instead of nothing.
                        logger.warning("Could not copy label file %s", source_file_path)
                        missing_labels.add(label_filename)
                else:
                    missing_labels.add(label_filename)

        # Zip the staged tree; arcnames are taken relative to the temp dir's
        # parent of 'locations', so every entry starts with 'locations/'.
        zip_buffer = BytesIO()
        with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, _dirs, files in os.walk(output_base_dir):
                for file in files:
                    file_path = Path(root) / file
                    zipf.write(file_path, file_path.relative_to(output_base_dir.parent))

        zip_bytes = zip_buffer.getvalue()

    stats = {
        'total_parts_processed': total_parts_processed,
        'files_copied_count': files_copied_count,
        'locations_count': len(location_to_ba_parts),
        'missing_labels_count': len(missing_labels),
        # Sorted so the displayed 20-item sample is deterministic across runs.
        'missing_labels_list': sorted(missing_labels)[:20],
    }

    return zip_bytes, stats

0 commit comments

Comments
 (0)