Compare commits

..

12 Commits

282 changed files with 2012 additions and 13185 deletions
+5 -8
View File
@@ -8,17 +8,11 @@ static/sets
Dockerfile
compose.yaml
# Local data directories
local/
offline/
data/
# Documentation
docs/
LICENSE
*.md
*.sample
.code-workspace
# Temporary
*.csv
@@ -32,8 +26,11 @@ LICENSE
**/__pycache__
*.pyc
# Hidden directories
.?*
# Git
.git
# IDE
.vscode
# Dev
test-server.sh
+64 -138
View File
@@ -1,23 +1,3 @@
# ================================================================================================
# BrickTracker Configuration File
# ================================================================================================
#
# FILE LOCATION (v1.3+):
# ----------------------
# This file can be placed in two locations:
# 1. data/.env (RECOMMENDED) - Included in data volume backup, settings persist via admin panel
# 2. .env (root) - Backward compatible
#
# Priority: data/.env > .env (root)
#
# The application automatically detects and uses the correct location at runtime.
#
# For Docker:
# - Recommended: Place this file as data/.env (included in data volume)
# - Backward compatible: Keep as .env in root (add "env_file: .env" to compose.yaml)
#
# ================================================================================================
#
# Note on *_DEFAULT_ORDER
# If set, it will append a direct ORDER BY <whatever you set> to the SQL query
# while listing objects. You can look at the structure of the SQLite database to
@@ -61,11 +41,11 @@
# Default: false
# BK_BRICKLINK_LINKS=true
# Optional: Path to the database, relative to '/app/' folder
# Optional: Path to the database.
# Useful if you need it mounted in a Docker volume. Keep in mind that it will not
# do any check on the existence of the path, or if it is dangerous.
# Default: data/app.db
# BK_DATABASE_PATH=data/app.db
# Default: ./app.db
# BK_DATABASE_PATH=/var/lib/bricktracker/app.db
# Optional: Format of the timestamp added to the database file when downloading it
# Check https://docs.python.org/3/library/time.html#time.strftime for format details
@@ -81,6 +61,10 @@
# Default: 25
# BK_DEFAULT_TABLE_PER_PAGE=50
# Optional: Maximum length for description text in badges before truncating with ellipsis
# Default: 15
# BK_DESCRIPTION_BADGE_MAX_LENGTH=15
# Optional: if set up, will add a CORS allow origin restriction to the socket.
# Default:
# Legacy name: DOMAIN_NAME
@@ -106,9 +90,9 @@
# Default: .pdf
# BK_INSTRUCTIONS_ALLOWED_EXTENSIONS=.pdf, .docx, .png
# Optional: Folder where to store the instructions, relative to '/app/' folder
# Default: data/instructions
# BK_INSTRUCTIONS_FOLDER=data/instructions
# Optional: Folder where to store the instructions, relative to the '/app/static/' folder
# Default: instructions
# BK_INSTRUCTIONS_FOLDER=/var/lib/bricktracker/instructions/
# Optional: Hide the 'Add' entry from the menu. Does not disable the route.
# Default: false
@@ -138,6 +122,13 @@
# Default: false
# BK_HIDE_ALL_MINIFIGURES=true
# Optional: Disable the individual/loose minifigures system. This hides all individual
# minifigure UI elements and prevents adding new individual minifigures. The routes remain
# accessible so existing individual minifigures can still be viewed. Users who only track
# set-based minifigures can use this to simplify the interface. Does not disable the route.
# Default: false
# BK_DISABLE_INDIVIDUAL_MINIFIGURES=false
# Optional: Hide the 'Parts' entry from the menu. Does not disable the route.
# Default: false
# BK_HIDE_ALL_PARTS=true
@@ -179,38 +170,20 @@
# Default: false
# BK_HIDE_WISHES=true
# Optional: Hide the 'Individual Minifigures' entry from the menu. Does not disable the route.
# Default: false
# BK_HIDE_INDIVIDUAL_MINIFIGURES=true
# Optional: Hide the 'Individual Parts' entry from the menu. Does not disable the route.
# Default: false
# BK_HIDE_INDIVIDUAL_PARTS=true
# Optional: Hide the 'Add to individual parts' quick-add buttons in parts tables.
# The column header with menu options (mark all missing, check all, etc.) remains visible.
# Default: false
# BK_HIDE_QUICK_ADD_INDIVIDUAL_PARTS=true
# Optional: Change the default order of minifigures. By default ordered by insertion order.
# Note: Minifigures are queried from a combined view that merges both set-based and individual minifigures.
# Therefore, column references should use the "combined" table alias.
# Useful column names for this option are:
# - "rebrickable_minifigures"."figure": minifigure ID (e.g., "fig-001234")
# - "rebrickable_minifigures"."number": minifigure ID as an integer (e.g., 1234)
# - "rebrickable_minifigures"."name": minifigure name
# - "rebrickable_minifigures"."number_of_parts": number of parts in the minifigure
# - "bricktracker_minifigures"."quantity": quantity owned
# - "total_missing": number of missing parts (composite field)
# - "total_damaged": number of damaged parts (composite field)
# - "total_quantity": total quantity across all sets (composite field)
# - "total_sets": number of sets containing this minifigure (composite field)
# Default: "rebrickable_minifigures"."name" ASC
# Examples:
# BK_MINIFIGURES_DEFAULT_ORDER="rebrickable_minifigures"."number" DESC
# BK_MINIFIGURES_DEFAULT_ORDER="total_missing" DESC, "rebrickable_minifigures"."name" ASC
# - "combined"."figure": minifigure ID (fig-xxxxx)
# - "combined"."number": minifigure ID as an integer (xxxxx)
# - "combined"."name": minifigure name
# - "combined"."rowid": insertion order (for both set and individual minifigures)
# Default: "combined"."name" ASC
# BK_MINIFIGURES_DEFAULT_ORDER="combined"."name" ASC
# Optional: Folder where to store the minifigures images, relative to '/app/' folder
# Default: data/minifigures
# BK_MINIFIGURES_FOLDER=data/minifigures
# Optional: Folder where to store the minifigures images, relative to the '/app/static/' folder
# Default: minifigs
# BK_MINIFIGURES_FOLDER=minifigures
# Optional: Disable threading on the task executed by the socket.
# You should not need to change this parameter unless you are debugging something with the
@@ -219,27 +192,20 @@
# BK_NO_THREADED_SOCKET=true
# Optional: Change the default order of parts. By default ordered by insertion order.
# Note: Parts are queried from a combined view that merges both set-based and individual minifigure parts.
# Some columns use the "combined" table alias for fields from the merged view.
# Useful column names for this option are:
# - "combined"."part": part number (e.g., "3001")
# - "combined"."spare": part is a spare part (0 or 1)
# - "combined"."quantity": quantity of this part
# - "combined"."missing": number of missing parts
# - "combined"."damaged": number of damaged parts
# - "combined"."part": part number
# - "combined"."spare": part is a spare part (use "combined" not "bricktracker_parts")
# - "rebrickable_parts"."name": part name
# - "rebrickable_parts"."color_name": part color name
# - "total_missing": total missing across all sets (composite field)
# - "total_damaged": total damaged across all sets (composite field)
# - "total_quantity": total quantity across all sets (composite field)
# - "total_sets": number of sets containing this part (composite field)
# - "total_minifigures": number of minifigures with this part (composite field)
# - "total_missing": number of missing parts
# Default: "rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "combined"."spare" ASC
# Examples:
# BK_PARTS_DEFAULT_ORDER="total_missing" DESC, "rebrickable_parts"."name" ASC
# BK_PARTS_DEFAULT_ORDER="rebrickable_parts"."color_name" ASC, "rebrickable_parts"."name" ASC
# Optional: Folder where to store the parts images, relative to '/app/' folder
# Default: data/parts
# BK_PARTS_FOLDER=data/parts
# Optional: Folder where to store the parts images, relative to the '/app/static/' folder
# Default: parts
# BK_PARTS_FOLDER=parts
# Optional: Enable server-side pagination for individual pages (recommended for large collections)
# When enabled, pages use server-side pagination with configurable page sizes
@@ -297,12 +263,9 @@
# Optional: Change the default order of purchase locations. By default ordered by insertion order.
# Useful column names for this option are:
# - "bricktracker_metadata_purchase_locations"."name": purchase location name
# - "bricktracker_metadata_purchase_locations"."rowid": insertion order (special column)
# - "bricktracker_metadata_purchase_locations"."name" ASC: purchase location name
# Default: "bricktracker_metadata_purchase_locations"."name" ASC
# Examples:
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."name" DESC
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."rowid" DESC
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."name" ASC
# Optional: Shuffle the lists on the front page.
# Default: false
@@ -318,23 +281,23 @@
# Optional: URL of the image representing a missing image in Rebrickable
# Default: https://rebrickable.com/static/img/nil.png
# BK_REBRICKABLE_IMAGE_NIL=https://rebrickable.com/static/img/nil.png
# BK_REBRICKABLE_IMAGE_NIL=
# Optional: URL of the image representing a missing minifigure image in Rebrickable
# Default: https://rebrickable.com/static/img/nil_mf.jpg
# BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE=https://rebrickable.com/static/img/nil_mf.jpg
# BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE=
# Optional: Pattern of the link to Rebrickable for a minifigure. Will be passed to Python .format()
# Default: https://rebrickable.com/minifigs/{figure}
# BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN=https://rebrickable.com/minifigs/{figure}
# BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN=
# Optional: Pattern of the link to Rebrickable for a part. Will be passed to Python .format()
# Default: https://rebrickable.com/parts/{part}/_/{color}
# BK_REBRICKABLE_LINK_PART_PATTERN=https://rebrickable.com/parts/{part}/_/{color}
# BK_REBRICKABLE_LINK_PART_PATTERN=
# Optional: Pattern of the link to Rebrickable for instructions. Will be passed to Python .format()
# Default: https://rebrickable.com/instructions/{path}
# BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN=https://rebrickable.com/instructions/{path}
# BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN=
# Optional: User-Agent to use when querying Rebrickable and Peeron outside of the Rebrick python library
# Default: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
@@ -380,33 +343,27 @@
# Default: https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date
# BK_RETIRED_SETS_FILE_URL=
# Optional: Path to the unofficial retired sets lists, relative to '/app/' folder
# Optional: Path to the unofficial retired sets lists
# You can name it whatever you want, but content has to be a CSV
# Default: data/retired_sets.csv
# BK_RETIRED_SETS_PATH=data/retired_sets.csv
# Default: ./retired_sets.csv
# BK_RETIRED_SETS_PATH=/var/lib/bricktracker/retired_sets.csv
# Optional: Change the default order of sets. By default ordered by insertion order.
# Useful column names for this option are:
# - "rebrickable_sets"."set": set number as a string (e.g., "10255-1")
# - "rebrickable_sets"."number": the number part of set as text (e.g., "10255")
# - "rebrickable_sets"."version": the version part of set as an integer (e.g., 1)
# - "rebrickable_sets"."set": set number as a string
# - "rebrickable_sets"."number": the number part of set as an integer
# - "rebrickable_sets"."version": the version part of set as an integer
# - "rebrickable_sets"."name": set name
# - "rebrickable_sets"."year": set release year
# - "rebrickable_sets"."number_of_parts": set number of parts
# - "bricktracker_sets"."purchase_date": purchase date (as REAL/Julian day)
# - "bricktracker_sets"."purchase_price": purchase price
# - "total_missing": number of missing parts (composite field)
# - "total_damaged": number of damaged parts (composite field)
# - "total_minifigures": number of minifigures (composite field)
# - "total_missing": number of missing parts
# - "total_minifigures": number of minifigures
# Default: "rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC
# Examples:
# BK_SETS_DEFAULT_ORDER="rebrickable_sets"."year" DESC, "rebrickable_sets"."name" ASC
# BK_SETS_DEFAULT_ORDER="rebrickable_sets"."number_of_parts" DESC
# BK_SETS_DEFAULT_ORDER="total_missing" DESC, "rebrickable_sets"."year" ASC
# BK_SETS_DEFAULT_ORDER="rebrickable_sets"."year" ASC
# Optional: Folder where to store the sets images, relative to '/app/' folder
# Default: data/sets
# BK_SETS_FOLDER=data/sets
# Optional: Folder where to store the sets images, relative to the '/app/static/' folder
# Default: sets
# BK_SETS_FOLDER=sets
# Optional: Enable set consolidation/grouping on the main sets page
# When enabled, multiple copies of the same set are grouped together showing instance count
@@ -426,14 +383,10 @@
# Default: true
# BK_SHOW_SETS_DUPLICATE_FILTER=true
# Optional: Skip importing spare parts when downloading sets from Rebrickable
# Optional: Skip saving or displaying spare parts
# Default: false
# BK_SKIP_SPARE_PARTS=true
# Optional: Hide spare parts from parts lists (spare parts must still be in database)
# Default: false
# BK_HIDE_SPARE_PARTS=true
# Optional: Namespace of the Socket.IO socket
# Default: bricksocket
# BK_SOCKET_NAMESPACE=customsocket
@@ -444,21 +397,18 @@
# Optional: Change the default order of storages. By default ordered by insertion order.
# Useful column names for this option are:
# - "bricktracker_metadata_storages"."name": storage name
# - "bricktracker_metadata_storages"."rowid": insertion order (special column)
# - "bricktracker_metadata_storages"."name" ASC: storage name
# Default: "bricktracker_metadata_storages"."name" ASC
# Examples:
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."name" DESC
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."rowid" DESC
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."name" ASC
# Optional: URL to the themes.csv.gz on Rebrickable
# Default: https://cdn.rebrickable.com/media/downloads/themes.csv.gz
# BK_THEMES_FILE_URL=
# Optional: Path to the themes file, relative to '/app/' folder
# Optional: Path to the themes file
# You can name it whatever you want, but content has to be a CSV
# Default: data/themes.csv
# BK_THEMES_PATH=data/themes.csv
# Default: ./themes.csv
# BK_THEMES_PATH=/var/lib/bricktracker/themes.csv
# Optional: Timezone to use to display datetimes
# Check your system for available timezone/TZ values
@@ -470,19 +420,14 @@
# Default: false
# BK_USE_REMOTE_IMAGES=true
# Optional: Change the default order of wishlist sets. By default ordered by insertion order.
# Optional: Change the default order of sets. By default ordered by insertion order.
# Useful column names for this option are:
# - "bricktracker_wishes"."set": set number as a string (e.g., "10255-1")
# - "bricktracker_wishes"."set": set number as a string
# - "bricktracker_wishes"."name": set name
# - "bricktracker_wishes"."year": set release year
# - "bricktracker_wishes"."number_of_parts": set number of parts
# - "bricktracker_wishes"."theme_id": theme ID
# - "bricktracker_wishes"."rowid": insertion order (special column)
# Default: "bricktracker_wishes"."rowid" DESC
# Examples:
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."year" DESC, "bricktracker_wishes"."name" ASC
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."number_of_parts" DESC
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."set" ASC
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."set" DESC
# Optional: Show collection growth charts on the statistics page
# Default: true
@@ -492,22 +437,3 @@
# When true, all sections start expanded. When false, all sections start collapsed.
# Default: true
# BK_STATISTICS_DEFAULT_EXPANDED=false
# Optional: Enable dark mode by default
# When true, the application starts in dark mode.
# Default: false
# BK_DARK_MODE=true
# Optional: Customize badge order for Grid view (set cards on /sets/)
# Comma-separated list of badge keys in the order they should appear
# Available badges: theme, tag, year, parts, instance_count, total_minifigures,
# total_missing, total_damaged, owner, storage, purchase_date, purchase_location,
# purchase_price, instructions, rebrickable, bricklink
# Default: theme,year,parts,total_minifigures,owner
# BK_BADGE_ORDER_GRID=theme,year,parts,total_minifigures,owner,storage
# Optional: Customize badge order for Detail view (individual set details page)
# Comma-separated list of badge keys in the order they should appear
# Use the same badge keys as BK_BADGE_ORDER_GRID
# Default: theme,tag,year,parts,instance_count,total_minifigures,total_missing,total_damaged,owner,storage,purchase_date,purchase_location,purchase_price,instructions,rebrickable,bricklink
# BK_BADGE_ORDER_DETAIL=theme,tag,year,parts,owner,storage,purchase_date,rebrickable,bricklink
+3 -6
View File
@@ -17,14 +17,12 @@ static/sets/
# IDE
.vscode/
*.code-workspace
# Temporary
*.csv
/local/
run_local.sh
settings.local.json
/offline/
# Apple idiocy
.DS_Store
@@ -35,7 +33,6 @@ vitepress/
# Local data
offline/
data/
# Hidden folders
.?*
TODO.md
run-local.sh
test-server.sh
+81 -416
View File
@@ -1,400 +1,17 @@
# Changelog
## 1.4
## Unreleased
### Bug Fixes
### 1.3
- **Fixed client-side table sorting corruption** (Issue #136): Resolved data corruption when using sort buttons with DataTables header sorting in client-side pagination mode
- Sort buttons now trigger actual table header clicks instead of using separate `columns.sort()`
- Header clicks sync button states to match current sort
- Prevents misaligned images, colors, and links when mixing sorting methods
- **Fixed storage deletion error handling**: Added proper validation and user-friendly error messages when attempting to delete storage locations that are still in use
- Shows detailed count of items using the storage (sets, individual minifigures, individual parts, part lots)
- Provides clickable link to storage details page for easy navigation
- Prevents accidental deletion of storage locations with referenced items
- **Fixed bulk parts redirect**: Corrected endpoint reference from `individual_part.list_all` to `individual_part.list` after route function rename
- **Fixed purchase location templates**: Created missing template files for purchase location pages
- **Fixed set refresh functionality**: Resolved issues with refreshing sets from Rebrickable
- Fixed foreign key constraint errors during refresh by reusing existing set IDs instead of generating new UUIDs
- Implemented UPDATE-then-INSERT pattern to properly update existing parts while preserving user tracking data
- Part quantities now correctly sync with Rebrickable during refresh
- User tracking data (`checked`, `missing`, `damaged`) is preserved across refreshes
- New parts from Rebrickable are added to local inventory during refresh
- Orphaned parts (parts no longer in Rebrickable's inventory) are now properly removed during refresh
- Refresh now works correctly for both set parts and minifigure parts
- Uses temporary tracking table to identify which parts are still valid before cleanup
- **Fixed Socket.IO connections behind reverse proxies**: Resolved WebSocket disconnection issues when using Traefik, Nginx, or other reverse proxies
- Root cause: Setting `BK_DOMAIN_NAME` enables strict CORS checking that fails with reverse proxies
- Solution: Leave `BK_DOMAIN_NAME` empty for reverse proxy deployments (allows all origins by default)
- Added debug logging for Socket.IO connections to help troubleshoot proxy issues
- **Fixed bulk import hanging on empty set numbers**: Resolved issue where trailing commas in bulk import input would cause infinite loops
- Empty strings from trailing commas (e.g., `"10312, 21348, "`) are now filtered out before processing
- Prevents "Set number cannot be empty" errors from blocking the bulk import queue
- **Added notes display toggles**: Added configuration options to show/hide notes on grid and detail views
- New `BK_SHOW_NOTES_GRID` setting (default: `false`) - controls whether notes appear on grid view cards
- New `BK_SHOW_NOTES_DETAIL` setting (default: `true`) - controls whether notes appear on set detail pages
- Notes display as an info alert box below badges when enabled
- Both settings can be toggled in Admin -> Live Settings panel without container restart
- Fixed consolidated SQL query to include description field for proper notes display in server-side pagination
- **Fixed permission denied when running as non-root user** (Issue #138): Resolved container startup failure when using `user:` directive in docker-compose
- Added `chmod -R a+rX /app` to Dockerfile to ensure all files are readable regardless of build environment
- Added commented `user:` example in `compose.yaml` to document non-root support
### Breaking Changes
- **Parts default order column names changed**: The `BK_PARTS_DEFAULT_ORDER` environment variable now uses `"combined"` instead of `"bricktracker_parts"` for column references
- If you have a custom `BK_PARTS_DEFAULT_ORDER` setting, update column references:
- `"bricktracker_parts"."spare"` → `"combined"."spare"`
- `"bricktracker_parts"."part"` → `"combined"."part"`
- `"bricktracker_parts"."quantity"` → `"combined"."quantity"`
- Or remove the custom setting to use the new defaults
- See `.env.sample` for the full list of available column names
### New Features
- **Sortable Checked column** (Issue #137): The "Checked" column in set inventory tables can now be sorted
- Click the "Checked" header to sort by checked/unchecked status
- Works in both parts table and part lots table
- **Quick-add individual parts toggle**: New `BK_HIDE_QUICK_ADD_INDIVIDUAL_PARTS` setting to hide the quick-add menu in set parts tables
- Hides the "Add to individual parts" option in the row menu dropdown
- Useful when you want individual parts tracking enabled but don't need quick-add from set inventory
- **Individual Minifigures Tracking**
- Track loose/individual minifigures outside of sets
- Part-level tracking for individual minifigures with problem states (missing/damaged/checked)
- Complete metadata support (owners, tags, statuses, storage, purchase info)
- Purchase tracking with date, location, and price
- Quick navigation from set minifigures to individual instances
- Filter and search capabilities
- Feature flags:
- `BK_HIDE_INDIVIDUAL_MINIFIGURES`: Hides individual minifigures UI elements (navbar menu item, links from minifigure detail pages)
- `BK_DISABLE_INDIVIDUAL_MINIFIGURES`: Enables read-only mode - all individual minifigure pages remain accessible but with all editing fields disabled (quantity, parts table, metadata inputs), delete buttons hidden, and write operations blocked.
- **Individual Parts Tracking**
- Track loose parts outside of sets and minifigures
- Quick-add functionality from set parts tables
- Complete metadata support (owners, tags, storage, purchase info)
- Problem tracking (missing/damaged/checked states)
- Purchase tracking with date, location, and price
- Bulk part addition interface
- Feature flags:
- `BK_HIDE_INDIVIDUAL_PARTS`: Hides individual parts UI elements (navbar menu item, "Add Parts" button, links from part detail pages)
- `BK_DISABLE_INDIVIDUAL_PARTS`: Enables read-only mode - all individual parts and lot pages remain accessible but with all editing fields disabled (quantity, missing/damaged, parts table, metadata inputs), delete buttons hidden, "Add Parts" menu item removed, and write operations blocked. The /add/ page also hides the "Adding individual parts?" section.
- **Part Lots System**
- Organize individual parts into logical lots/collections
- Lot-level metadata (name, description, created date)
- Shared metadata across lot (storage, purchase info)
- View all parts in a lot with filtering
- **Purchase Location Management**
- Centralized purchase location tracking for sets, individual minifigures, parts, and lots
- New purchase location management page (`/purchase-locations/`)
- Track which items were purchased from each location
- Integrated with existing storage and owner metadata systems
- **Rebrickable Color Database**
- Caches color information from Rebrickable API
- Provides BrickLink color ID mapping
- Reduces repeated API calls for color data
- Supports export functionality with correct color IDs
- **Export Functionality**
- Added export system in admin panel for sets, parts, and problem parts
- Export accordion in `/admin/` with three main categories:
- **Export Sets**: Rebrickable CSV format for collection tracking
- **Export All Parts**: Three formats available:
- Rebrickable CSV (Part, Color, Quantity)
- LEGO Pick-a-Brick CSV (elementId, quantity)
- BrickLink XML (wanted list format)
- **Export Missing/Damaged Parts**: Same three formats as parts exports
- All exports aggregate quantities automatically (parts by part+color, LEGO by element ID)
- BrickLink exports use proper BrickLink part numbers and color IDs when available
- Format information displayed in UI for user guidance
- **Badge Order Customization**
- Added customizable badge ordering for set cards and detail pages
- Separate configurations for Grid view (`/sets/` cards) and Detail view (individual set pages)
- Configure via environment variables in `.env` file:
- `BK_BADGE_ORDER_GRID`: Comma-separated badge keys for grid view (default: theme,year,parts,total_minifigures,owner)
- `BK_BADGE_ORDER_DETAIL`: Comma-separated badge keys for detail view (default: all 16 badges)
- Can also be configured via Live Settings page in admin panel under "Default Ordering & Formatting"
- Changes apply immediately without restart when edited via admin panel
- 16 available badge types: theme, tag, year, parts, instance_count, total_minifigures, total_missing, total_damaged, owner, storage, purchase_date, purchase_location, purchase_price, instructions, rebrickable, bricklink
- **Front Page Parts Display**
- Added latest/random parts section to the front page alongside sets and minifigures
- Shows 6 parts with quantity badges and other relevant information
- Respects `BK_RANDOM` configuration (random selection when enabled, latest when disabled)
- Respects `BK_HIDE_SPARE_PARTS` configuration
- Respects `BK_HIDE_ALL_PARTS` configuration for "All parts" button visibility
- **NOT Filter Toggle Buttons**
- Added toggle buttons next to all filter dropdowns to switch between "equals" and "not equals" modes
- Visual feedback: Button displays red with "not equals" icon (≠) when in NOT mode
- Works with all filter types: Status, Theme, Owner, Storage, Purchase Location, Tag, and Year
- Supports both client-side and server-side pagination modes
- Filter chains persist NOT states across page reloads via URL parameters (e.g., `?theme=-frozen&status=-has-missing`)
- Clear filters button resets all toggle states to equals mode
- Enables complex filter combinations like "Show me 2025 sets that are NOT Frozen theme AND have missing pieces"
- **Notes/Comments Field**
- Added general notes field to set details for storing custom notes and comments
- Accessible via Management -> Notes accordion section on set detail pages
- Auto-save functionality with visual feedback (save icon updates on change)
- Notes display prominently below badges on set cards when populated
- Supports multi-line text input with configurable row height
- Clear button to quickly remove notes
- **Bulk Set Refresh**
- Added batch refresh functionality for updating multiple sets at once
- New "Bulk Refresh" button appears on Admin -> Sets needing refresh page
- Pre-populates text-area with comma-separated list of all sets needing refresh
- Follows same pattern as bulk add with progress tracking and set card preview
- Shows real-time progress with current set being processed
- Failed sets remain in input field for easy retry
### Database Improvements
- **Standardized ON DELETE Behavior**: Unified foreign key deletion handling across all metadata tables
- All metadata foreign keys now use RESTRICT (prevent deletion if referenced)
- Prevents accidental deletion of storage locations or purchase locations that are in use
- **Performance Indexes Added**: New composite indexes for common query patterns
- `idx_individual_parts_lot_id_part_color` - Optimizes listing parts within a lot
- `idx_individual_parts_missing_damaged` - Optimizes finding parts with problems
- `idx_individual_minifigure_parts_checked` - Optimizes finding unchecked parts in minifigures
- **Consolidated Metadata Tables**: Migration 0027 removes foreign key constraints from metadata junction tables
- `bricktracker_set_owners`, `bricktracker_set_tags`, `bricktracker_set_statuses` now accept any entity type
- Enables reusing metadata tables for sets, individual minifigures, individual parts, and lots
- **Fixed Schema Drop Script**: Resolved foreign key constraint errors during database reset
- Added proper table drop ordering (children before parents)
- Implemented `PRAGMA foreign_keys OFF/ON` wrapping
- Includes all new tables from migrations 0021-0027
### Configuration & Environment Variables
- **New Configuration Options**:
- `BK_HIDE_INDIVIDUAL_MINIFIGURES` - Hide individual minifigures UI elements in navigation
- `BK_DISABLE_INDIVIDUAL_MINIFIGURES` - Block write operations for individual minifigures (view-only mode)
- `BK_HIDE_INDIVIDUAL_PARTS` - Hide individual parts UI elements in navigation
- `BK_DISABLE_INDIVIDUAL_PARTS` - Block write operations for individual parts (view-only mode)
- `BK_BADGE_ORDER_GRID` - Customize badge order on set cards in grid view (comma-separated list)
- `BK_BADGE_ORDER_DETAIL` - Customize badge order on set detail pages (comma-separated list)
- `BK_SHOW_NOTES_GRID` - Show notes on set cards in grid view (default: false)
- `BK_SHOW_NOTES_DETAIL` - Show notes on set detail pages (default: true)
- All new settings support live configuration updates via Admin panel
### Technical Improvements
- **Route Protection Decorators**: New decorator pattern for feature flag enforcement
- `@require_individual_minifigures_write` - Blocks writes when feature is disabled
- `@require_individual_parts_write` - Blocks writes when feature is disabled
- Allows viewing existing data while preventing new additions
- **SQL Query Organization**: New query directory structure for individual features
- `bricktracker/sql/individual_minifigure/` - All individual minifigure queries
- `bricktracker/sql/individual_part/` - All individual part queries
- `bricktracker/sql/individual_part_lot/` - All part lot queries
- `bricktracker/sql/rebrickable_colors/` - Color reference queries
- `bricktracker/sql/rebrickable_parts/` - Part reference queries
- **Database Migrations**: 7 new migrations (0021-0027)
- 0021: Individual minifigures and parts tables
- 0022: Individual part lots system with proper foreign keys
- 0023: Performance indexes for individual features
- 0024: Rebrickable colors cache table
- 0025: Additional composite indexes for query optimization
- 0026: Standardized ON DELETE behavior across metadata tables
- 0027: Consolidated metadata tables (remove FK constraints)
## 1.3.1
### New Functionality
- **Database Integrity Check and Cleanup**
- Added database integrity scanner to detect orphaned records and foreign key violations
- New "Check Database Integrity" button in admin panel scans for issues
- Detects orphaned sets, parts, and parts with missing set references
- Warning prompts users to backup database before cleanup
- Cleanup removes all orphaned records in one operation
- Detailed scan results show affected records with counts and descriptions
- **Database Optimization**
- Added "Optimize Database" button to re-create performance indexes
- Safe to run after database imports or restores
- Re-creates all indexes from migration #19 using `CREATE INDEX IF NOT EXISTS`
- Runs `ANALYZE` to rebuild query statistics
- Runs `PRAGMA optimize` for additional query plan optimization
- Helpful after importing backup databases that may lack performance optimizations
### Bug Fixes
- **Fixed foreign key constraint errors during set imports**: Resolved `FOREIGN KEY constraint failed` errors when importing sets with parts and minifigures
- Fixed insertion order in `bricktracker/part.py`: Parent records (`rebrickable_parts`) now inserted before child records (`bricktracker_parts`)
- Fixed insertion order in `bricktracker/minifigure.py`: Parent records (`rebrickable_minifigures`) now inserted before child records (`bricktracker_minifigures`)
- Ensures foreign key references are valid when SQLite checks constraints
- **Fixed set metadata updates**: Owner, status, and tag checkboxes now properly persist changes on set details page
- Fixed `update_set_state()` method to commit database transactions (was using deferred execution without commit)
- All metadata updates (owner, status, tags, storage, purchase info) now work consistently
- **Fixed nil image downloads**: Placeholder images for parts and minifigures without images now download correctly
- Removed early returns that prevented nil image downloads
- Nil images now properly saved to configured folders (e.g., `/app/data/parts/nil.jpg`)
- **Fixed error logging for missing files**: File not found errors now show actual configured folder paths instead of just URL paths
- Added detailed logging showing both file path and configured folder for easier debugging
- **Fixed minifigure filters in client-side pagination mode**: Owner and other filters now work correctly when server-side pagination is disabled
- Aligned filter behavior with parts page (applies filters server-side, then loads filtered data for client-side search)
## 1.3
### Breaking Changes
#### Data Folder Consolidation
> **Warning**
> **BREAKING CHANGE**: Version 1.3 consolidates all user data into a single `data/` folder for easier backup and volume mapping.
- **Path handling**: All relative paths are now resolved relative to the application root (`/app` in Docker)
  - Example: `data/app.db` → `/app/data/app.db`
- **New default paths** (automatically used for new installations):
- Database: `data/app.db` (was: `app.db` in root)
- Configuration: `data/.env` (was: `.env` in root) - *optional, backward compatible*
- CSV files: `data/*.csv` (was: `*.csv` in root)
- Images/PDFs: `data/{sets,parts,minifigures,instructions}/` (was: `static/*`)
- **Configuration file (.env) location**:
- New recommended location: `data/.env` (included in data volume, settings persist)
- Backward compatible: `.env` in root still works (requires volume mount for admin panel persistence)
- Priority: `data/.env` > `.env` (automatic detection, no migration required)
- **Migration options**:
1. **Migrate to new structure** (recommended - single volume for all data including .env)
2. **Keep current setup** (backward compatible - old paths continue to work)
See [Migration Guide](docs/migration_guide.md) for detailed instructions
#### Default Minifigures Folder Change
> **Warning**
> **BREAKING CHANGE**: Default minifigures folder path changed from `minifigs` to `minifigures`
- **Impact**: Users who relied on the default `BK_MINIFIGURES_FOLDER` value (without explicitly setting it) will need to either:
1. Set `BK_MINIFIGURES_FOLDER=minifigs` in their environment to maintain existing behavior, or
2. Rename their existing `minifigs` folder to `minifigures`
- **No impact**: Users who already have `BK_MINIFIGURES_FOLDER` explicitly configured
- Improved consistency across documentation and Docker configurations
### New Features
- **Live Settings changes**
- Added live environment variable configuration management system
- Configuration Management interface in admin panel with live preview and badge system
- **Live settings**: Can be changed without application restart (menu visibility, table display, pagination, features)
- **Static settings**: Require restart but can be edited and saved to .env file (authentication, server, database, API keys)
- Advanced badge system showing value status: True/False for booleans, Set/Default/Unset for other values, Changed indicator
- Live API endpoints: `/admin/api/config/update` for immediate changes, `/admin/api/config/update-static` for .env updates
- Form pre-population with current values and automatic page reload after successful live updates
- Fixed environment variable lock detection in admin configuration panel
- Resolved bug where all variables appeared "locked" after saving live settings
- Lock detection now correctly identifies only Docker environment variables set before .env loading
- Variables set via Docker's `environment:` directive remain properly locked
- Variables from data/.env or root .env are correctly shown as editable
- Added configuration persistence warning in admin panel
- Warning banner shows when using .env in root (non-persistent)
- Success banner shows when using data/.env (persistent)
- Provides migration instructions directly in the UI
- **Spare Parts**
- Added spare parts control options
- `BK_SKIP_SPARE_PARTS`: Skip importing spare parts when downloading sets from Rebrickable (parts not saved to database)
- `BK_HIDE_SPARE_PARTS`: Hide spare parts from all parts lists (parts must still be in database)
- Both options are live-changeable in admin configuration panel
- Options can be used independently or together for flexible spare parts management
- Affects all parts displays: /parts page, set details accordion, minifigure parts, and problem parts
- **Pagination**
- Added individual pagination control system per entity type
- `BK_SETS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for sets
- `BK_PARTS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for parts
- `BK_MINIFIGURES_SERVER_SIDE_PAGINATION`: Enable/disable pagination for minifigures
- Device-specific pagination sizes (desktop/mobile) for each entity type
- Supports search, filtering, and sorting in both server-side and client-side modes
- **Peeron Instructions**
- Added Peeron instructions integration
- Full image caching system with automatic thumbnail generation
- Optimized HTTP calls by downloading full images once and generating thumbnails locally
- Automatic cache cleanup after PDF generation to save disk space
- **Parts checkmark**
- Added parts checking/inventory system
- New "Checked" column in parts tables for tracking inventory progress
- Checkboxes to mark parts as verified during set walkthrough
- `BK_HIDE_TABLE_CHECKED_PARTS`: Environment variable to hide checked column
- **Set Consolidation**
- Added set consolidation/grouping functionality
- Automatic grouping of duplicate sets on main sets page
- Shows instance count with stack icon badge (e.g., "3 copies")
- Expandable drawer interface to view all set copies individually
- Full set cards for each instance with all badges, statuses, and functionality
- `BK_SETS_CONSOLIDATION`: Environment variable to enable/disable consolidation (default: false)
- Backwards compatible - when disabled, behaves exactly like original individual view
- Improved theme filtering: handles duplicate theme names correctly
- Fixed set number sorting: proper numeric sorting in both ascending and descending order
- Mixed status indicators for consolidated sets: three-state checkboxes (unchecked/partial/checked) with count badges
- Template logic handles three states: none (0/2), all (2/2), partial (1/2) with visual indicators
- Purple overlay styling for partial states, disabled checkboxes for read-only consolidated status display
- Individual sets maintain full interactive checkbox functionality
- **Statistics**
- Added comprehensive statistics system (#91)
- New Statistics page with collection analytics
- Financial overview: total cost, average price, price range, investment tracking
- Collection metrics: total sets, unique sets, parts count, minifigures count
- Theme distribution statistics with clickable drill-down to filtered sets
- Storage location statistics showing sets per location with value calculations
- Purchase location analytics with spending patterns and date ranges
- Problem tracking: missing and damaged parts statistics
- Clickable numbers throughout statistics that filter to relevant sets
- `BK_HIDE_STATISTICS`: Environment variable to hide statistics menu item
- Year-based analytics: Sets by release year and purchases by year
- Sets by Release Year: Shows collection distribution across LEGO release years
- Purchases by Year: Tracks spending patterns and acquisition timeline
- Year summary with peak collection/spending years and timeline insights
- Enhanced statistics interface and functionality
- Collapsible sections: All statistics sections have clickable headers to expand/collapse
- Collection growth charts: Line charts showing sets, parts, and minifigures over time
- Configuration options: `BK_STATISTICS_SHOW_CHARTS` and `BK_STATISTICS_DEFAULT_EXPANDED` environment variables
- **Admin Page Section Expansion**
- Added configurable admin page section expansion
- `BK_ADMIN_DEFAULT_EXPANDED_SECTIONS`: Environment variable to specify which sections expand by default
- Accepts comma-separated list of section names (e.g., "database,theme,instructions")
- Valid sections: authentication, instructions, image, theme, retired, metadata, owner, purchase_location, status, storage, tag, database
- URL parameters take priority over configuration (e.g., `?open_database=1`)
- Database section expanded by default to maintain original behavior
- Smart metadata handling: sub-section expansion automatically expands parent metadata section
- **Duplicate Sets filter**
- Added duplicate sets filter functionality
- New filter button on Sets page to show only duplicate/consolidated sets
- `BK_SHOW_SETS_DUPLICATE_FILTER`: Environment variable to show/hide the filter button (default: true)
- Works with both server-side and client-side pagination modes
- Consolidated mode: Shows sets that have multiple instances
- Non-consolidated mode: Shows sets that appear multiple times in collection
- **Bricklink Links**
- Added BrickLink links for sets
- BrickLink badge links now appear on set cards and set details pages alongside Rebrickable links
- `BK_BRICKLINK_LINK_SET_PATTERN`: New environment variable for BrickLink set URL pattern (default: https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num})
- Controlled by existing `BK_BRICKLINK_LINKS` environment variable
- **Dark Mode**
- Added dark mode support
- `BK_DARK_MODE`: Environment variable to enable dark mode theme (default: false)
- Uses Bootstrap 5.3's native dark mode with `data-bs-theme` attribute
- Live-changeable via Admin > Live Settings
- Setting persists across sessions via .env file
- **Alphanumeric Set Number**
- Added alphanumeric set number support
- Database schema change: Set number column changed from INTEGER to TEXT
- Supports LEGO promotional and special edition sets with letters in their numbers
- Examples: "McDR6US-1", "COMCON035-1", "EG00021-1"
### Improvements
- Improved WebSocket/Socket.IO reliability for mobile devices
- Changed connection strategy to polling-first with automatic WebSocket upgrade
- Increased connection timeout to 30 seconds for slow mobile networks
- Added ping/pong keepalive settings (30s timeout, 25s interval)
- Improved server-side connection logging with user agent and transport details
- Add individual pagination control system per entity type
- `BK_SETS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for sets
- `BK_PARTS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for parts
- `BK_MINIFIGURES_SERVER_SIDE_PAGINATION`: Enable/disable pagination for minifigures
- Device-specific pagination sizes (desktop/mobile) for each entity type
- Supports search, filtering, and sorting in both server-side and client-side modes
- Consolidated duplicate code across parts.js, problems.js, and minifigures.js
- Created shared functions in collapsible-state.js for common operations
- Fixed dynamic sort icons across all pages
- Sort icons now properly toggle between ascending/descending states
- Improved DataTable integration
@@ -406,7 +23,76 @@ See [Migration Guide](docs/migration_guide.md) for detailed instructions
- Preserves selection state during dropdown consolidation
- Consistent search behavior (instant for client-side, Enter key for server-side)
- Mobile-friendly pagination navigation
- Added performance optimization
- Add Peeron instructions integration
- Full image caching system with automatic thumbnail generation
- Optimized HTTP calls by downloading full images once and generating thumbnails locally
- Automatic cache cleanup after PDF generation to save disk space
- Add parts checking/inventory system
- New "Checked" column in parts tables for tracking inventory progress
- Checkboxes to mark parts as verified during set walkthrough
- `BK_HIDE_TABLE_CHECKED_PARTS`: Environment variable to hide checked column
- Add set consolidation/grouping functionality
- Automatic grouping of duplicate sets on main sets page
- Shows instance count with stack icon badge (e.g., "3 copies")
- Expandable drawer interface to view all set copies individually
- Full set cards for each instance with all badges, statuses, and functionality
- `BK_SETS_CONSOLIDATION`: Environment variable to enable/disable consolidation (default: false)
- Backwards compatible - when disabled, behaves exactly like original individual view
- Improved theme filtering: handles duplicate theme names correctly
- Fixed set number sorting: proper numeric sorting in both ascending and descending order
- Mixed status indicators for consolidated sets: three-state checkboxes (unchecked/partial/checked) with count badges
- Template logic handles three states: none (0/2), all (2/2), partial (1/2) with visual indicators
- Purple overlay styling for partial states, disabled checkboxes for read-only consolidated status display
- Individual sets maintain full interactive checkbox functionality
- Add comprehensive statistics system (#91)
- New Statistics page with collection analytics
- Financial overview: total cost, average price, price range, investment tracking
- Collection metrics: total sets, unique sets, parts count, minifigures count
- Theme distribution statistics with clickable drill-down to filtered sets
- Storage location statistics showing sets per location with value calculations
- Purchase location analytics with spending patterns and date ranges
- Problem tracking: missing and damaged parts statistics
- Clickable numbers throughout statistics that filter to relevant sets
- `BK_HIDE_STATISTICS`: Environment variable to hide statistics menu item
- Year-based analytics: Sets by release year and purchases by year
- Sets by Release Year: Shows collection distribution across LEGO release years
- Purchases by Year: Tracks spending patterns and acquisition timeline
- Year summary with peak collection/spending years and timeline insights
- Enhanced statistics interface and functionality
- Collapsible sections: All statistics sections have clickable headers to expand/collapse
- Collection growth charts: Line charts showing sets, parts, and minifigures over time
- Configuration options: `BK_STATISTICS_SHOW_CHARTS` and `BK_STATISTICS_DEFAULT_EXPANDED` environment variables
- Add configurable admin page section expansion
- `BK_ADMIN_DEFAULT_EXPANDED_SECTIONS`: Environment variable to specify which sections expand by default
- Accepts comma-separated list of section names (e.g., "database,theme,instructions")
- Valid sections: authentication, instructions, image, theme, retired, metadata, owner, purchase_location, status, storage, tag, database
- URL parameters take priority over configuration (e.g., `?open_database=1`)
- Database section expanded by default to maintain original behavior
- Smart metadata handling: sub-section expansion automatically expands parent metadata section
- Add duplicate sets filter functionality
- New filter button on Sets page to show only duplicate/consolidated sets
- `BK_SHOW_SETS_DUPLICATE_FILTER`: Environment variable to show/hide the filter button (default: true)
- Works with both server-side and client-side pagination modes
- Consolidated mode: Shows sets that have multiple instances
- Non-consolidated mode: Shows sets that appear multiple times in collection
- Add BrickLink links for sets
- BrickLink badge links now appear on set cards and set details pages alongside Rebrickable links
- `BK_BRICKLINK_LINK_SET_PATTERN`: New environment variable for BrickLink set URL pattern (default: https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num})
- Controlled by existing `BK_BRICKLINK_LINKS` environment variable
- Add live environment variable configuration management system
- Configuration Management interface in admin panel with live preview and badge system
- Live settings: Can be changed without application restart (menu visibility, table display, pagination, features)
- Static settings: Require restart but can be edited and saved to .env file (authentication, server, database, API keys)
- Advanced badge system showing value status: True/False for booleans, Set/Default/Unset for other values, Changed indicator
- Live API endpoints: `/admin/api/config/update` for immediate changes, `/admin/api/config/update-static` for .env updates
- Form pre-population with current values and automatic page reload after successful live updates
- **BREAKING CHANGE**: Default minifigures folder path changed from `minifigs` to `minifigures`
- Impact: Users who relied on the default `BK_MINIFIGURES_FOLDER` value (without explicitly setting it) will need to either:
1. Set `BK_MINIFIGURES_FOLDER=minifigs` in their environment to maintain existing behavior, or
2. Rename their existing `minifigs` folder to `minifigures`
- No impact: Users who already have `BK_MINIFIGURES_FOLDER` explicitly configured
- Improved consistency across documentation and Docker configurations
- Add performance optimization
- SQLite WAL Mode:
- Increased cache size to 10,000 pages (~40MB) for faster query execution
- Set temp_store to memory for accelerated temporary operations
@@ -422,29 +108,8 @@ See [Migration Guide](docs/migration_guide.md) for detailed instructions
- Statistics Query Optimization:
- Replaced separate subqueries with efficient CTEs (Common Table Expressions)
- Consolidated aggregations for set, part, minifigure, and financial statistics
- Added default image handling for sets without images
- Sets with null/missing images from Rebrickable API now display placeholder image
- Automatic fallback to nil.png from parts folder for set previews
- Copy of nil placeholder saved as set image for consistent display across all routes
- Prevents errors when downloading sets that have no set_img_url in API response
- Fixed instructions download from Rebrickable
- Replaced cloudscraper with standard requests library
- Resolves 403 Forbidden errors when downloading instruction PDFs
- Fixed instructions display and URL generation
- Fixed "Open PDF" button links to use correct data route
- Corrected path resolution for data/instructions folder
- Fixed instruction listing page to scan correct folder location
- Fixed Peeron PDF creation to use correct data folder path
- Fixed foreign key constraint error when adding sets
- Rebrickable set is now inserted before BrickTracker set to satisfy FK constraints
- Resolves "FOREIGN KEY constraint failed" error when adding sets
- Fixed atomic transaction handling for set downloads
- All database operations during set addition now use deferred execution
- Ensures all-or-nothing behavior: if any part fails (set info, parts, minifigs), nothing is committed
- Prevents partial set additions that would leave the database in an inconsistent state
- Metadata updates (owners, tags) now defer until final commit
## 1.2.4
### 1.2.4
> **Warning**
> To use the new BrickLink color parameter in URLs, update your `.env` file:
+4 -10
View File
@@ -2,19 +2,13 @@ FROM python:3-slim
WORKDIR /app
# Copy requirements first (so pip install can be cached)
COPY requirements.txt .
# Python library requirements
RUN pip install --no-cache-dir -r requirements.txt
# Bricktracker
COPY . .
# Ensure all files are readable by non-root users (supports user: directive in compose)
RUN chmod -R a+rX /app
# Fix line endings and set executable permissions for entrypoint script
RUN sed -i 's/\r$//' entrypoint.sh && chmod +x entrypoint.sh
# Set executable permissions for entrypoint script
RUN chmod +x entrypoint.sh
# Python library requirements
RUN pip --no-cache-dir install -r requirements.txt
ENTRYPOINT ["./entrypoint.sh"]
+10 -8
View File
@@ -1,13 +1,9 @@
<img src="static/brick.png" height="100" width="100">
# BrickTracker
A web application for organizing and tracking LEGO sets, parts, and minifigures. Uses the Rebrickable API to fetch LEGO data and allows users to track missing pieces and collection status.
<a href="https://www.buymeacoffee.com/frederikb" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" height="41" width="174"></a>
<a href="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=48JEEKLCGB8DJ"><img src="./docs/images/blue.svg" height="40"></a>
## Features
- Track multiple LEGO sets with their parts and minifigures
@@ -20,13 +16,19 @@ A web application for organizing and tracking LEGO sets, parts, and minifigures.
## Preferred setup: pre-built docker image
See [Quick Start](https://bricktracker.baerentsen.space/quick-start) to get up and running right away.
Use the provided [compose.yaml](compose.yaml) file.
See [Walk Through](https://bricktracker.baerentsen.space/tutorial-first-steps) for a more detailed guide.
See [Quickstart](docs/quickstart.md) to get up and running right away.
See [Setup](docs/setup.md) for a more detailed setup guide.
## Usage
See [first steps](docs/first-steps.md).
## Documentation
Most of the pages should be self-explanatory to use.
However, you can find more specific documentation in the [documentation](https://bricktracker.baerentsen.space/whatis).
However, you can find more specific documentation in the [documentation](docs/DOCS.md).
You can find screenshots of the application in the [overview](https://bricktracker.baerentsen.space/overview) documentation.
You can find screenshots of the application in the [overview](docs/overview.md) documentation file.
+2 -67
View File
@@ -1,8 +1,6 @@
import logging
import os
import sys
import time
from pathlib import Path
from zoneinfo import ZoneInfo
from flask import current_app, Flask, g
@@ -17,7 +15,6 @@ from bricktracker.version import __version__
from bricktracker.views.add import add_page
from bricktracker.views.admin.admin import admin_page
from bricktracker.views.admin.database import admin_database_page
from bricktracker.views.admin.export import admin_export_page
from bricktracker.views.admin.image import admin_image_page
from bricktracker.views.admin.instructions import admin_instructions_page
from bricktracker.views.admin.owner import admin_owner_page
@@ -28,76 +25,20 @@ from bricktracker.views.admin.status import admin_status_page
from bricktracker.views.admin.storage import admin_storage_page
from bricktracker.views.admin.tag import admin_tag_page
from bricktracker.views.admin.theme import admin_theme_page
from bricktracker.views.data import data_page
from bricktracker.views.error import error_404
from bricktracker.views.index import index_page
from bricktracker.views.individual_minifigure import individual_minifigure_page
from bricktracker.views.individual_part import individual_part_page
from bricktracker.views.instructions import instructions_page
from bricktracker.views.login import login_page
from bricktracker.views.individual_minifigure import individual_minifigure_page
from bricktracker.views.minifigure import minifigure_page
from bricktracker.views.part import part_page
from bricktracker.views.purchase_location import purchase_location_page
from bricktracker.views.set import set_page
from bricktracker.views.statistics import statistics_page
from bricktracker.views.storage import storage_page
from bricktracker.views.wish import wish_page
def load_env_file() -> None:
"""Load .env file into os.environ with priority: data/.env > .env (root)
Also stores which BK_ variables were set via Docker environment (before loading .env)
so we can detect locked variables in the admin panel.
"""
import json
data_env = Path('data/.env')
root_env = Path('.env')
# Store which BK_ variables were already in environment BEFORE loading .env
# These are "locked" (set via Docker's environment: directive)
docker_env_vars = {k: v for k, v in os.environ.items() if k.startswith('BK_')}
# Store this in a way the admin panel can access it
# We'll use an environment variable to store the JSON list of locked var names
os.environ['_BK_DOCKER_ENV_VARS'] = json.dumps(list(docker_env_vars.keys()))
env_file = None
if data_env.exists():
env_file = data_env
logging.info(f"Loading environment from: {data_env}")
elif root_env.exists():
env_file = root_env
logging.info(f"Loading environment from: {root_env} (consider migrating to data/.env)")
if env_file:
# Simple .env parser (no external dependencies needed)
with open(env_file, 'r', encoding='utf-8') as f:
for line in f:
line = line.strip()
# Skip comments and empty lines
if not line or line.startswith('#'):
continue
# Parse key=value
if '=' in line:
key, value = line.split('=', 1)
key = key.strip()
value = value.strip()
# Remove quotes if present
if value.startswith('"') and value.endswith('"'):
value = value[1:-1]
elif value.startswith("'") and value.endswith("'"):
value = value[1:-1]
# Only set if not already in environment (environment variables take precedence)
if key not in os.environ:
os.environ[key] = value
def setup_app(app: Flask) -> None:
# Load .env file before configuration (if not already loaded by Docker Compose)
load_env_file()
# Load the configuration
BrickConfigurationList(app)
@@ -108,14 +49,12 @@ def setup_app(app: Flask) -> None:
level=logging.DEBUG,
format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s', # noqa: E501
)
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.basicConfig(
stream=sys.stdout,
level=logging.INFO,
format='[%(asctime)s] %(levelname)s - %(message)s',
)
logging.getLogger().setLevel(logging.INFO)
# Load the navbar
Navbar(app)
@@ -139,15 +78,12 @@ def setup_app(app: Flask) -> None:
# Register app routes
app.register_blueprint(add_page)
app.register_blueprint(data_page)
app.register_blueprint(index_page)
app.register_blueprint(individual_minifigure_page)
app.register_blueprint(individual_part_page)
app.register_blueprint(instructions_page)
app.register_blueprint(login_page)
app.register_blueprint(individual_minifigure_page)
app.register_blueprint(minifigure_page)
app.register_blueprint(part_page)
app.register_blueprint(purchase_location_page)
app.register_blueprint(set_page)
app.register_blueprint(statistics_page)
app.register_blueprint(storage_page)
@@ -156,7 +92,6 @@ def setup_app(app: Flask) -> None:
# Register admin routes
app.register_blueprint(admin_page)
app.register_blueprint(admin_database_page)
app.register_blueprint(admin_export_page)
app.register_blueprint(admin_image_page)
app.register_blueprint(admin_instructions_page)
app.register_blueprint(admin_retired_page)
+9 -21
View File
@@ -10,34 +10,28 @@ from typing import Any, Final
CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'AUTHENTICATION_PASSWORD', 'd': ''},
{'n': 'AUTHENTICATION_KEY', 'd': ''},
# BrickLink minifigure links disabled - Rebrickable doesn't provide BrickLink minifigure IDs
# {'n': 'BRICKLINK_LINK_MINIFIGURE_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?M={figure}'}, # noqa: E501
{'n': 'BRICKLINK_LINK_PART_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}'}, # noqa: E501
{'n': 'BRICKLINK_LINK_SET_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num}'}, # noqa: E501
{'n': 'BRICKLINK_LINKS', 'c': bool},
{'n': 'DATABASE_PATH', 'd': 'data/app.db'},
{'n': 'DATABASE_PATH', 'd': './app.db'},
{'n': 'DATABASE_TIMESTAMP_FORMAT', 'd': '%Y-%m-%d-%H-%M-%S'},
{'n': 'DEBUG', 'c': bool},
{'n': 'DEFAULT_TABLE_PER_PAGE', 'd': 25, 'c': int},
{'n': 'DESCRIPTION_BADGE_MAX_LENGTH', 'd': 15, 'c': int},
{'n': 'DISABLE_INDIVIDUAL_MINIFIGURES', 'c': bool},
{'n': 'DISABLE_INDIVIDUAL_PARTS', 'c': bool},
{'n': 'DISABLE_QUICK_ADD_INDIVIDUAL_PARTS', 'c': bool},
{'n': 'HIDE_QUICK_ADD_INDIVIDUAL_PARTS', 'c': bool},
{'n': 'DOMAIN_NAME', 'e': 'DOMAIN_NAME', 'd': ''},
{'n': 'FILE_DATETIME_FORMAT', 'd': '%d/%m/%Y, %H:%M:%S'},
{'n': 'HOST', 'd': '0.0.0.0'},
{'n': 'INDEPENDENT_ACCORDIONS', 'c': bool},
{'n': 'INSTRUCTIONS_ALLOWED_EXTENSIONS', 'd': ['.pdf'], 'c': list}, # noqa: E501
{'n': 'INSTRUCTIONS_FOLDER', 'd': 'data/instructions'},
{'n': 'INSTRUCTIONS_FOLDER', 'd': 'instructions', 's': True},
{'n': 'HIDE_ADD_SET', 'c': bool},
{'n': 'HIDE_ADD_BULK_SET', 'c': bool},
{'n': 'HIDE_ADMIN', 'c': bool},
{'n': 'ADMIN_DEFAULT_EXPANDED_SECTIONS', 'd': ['database'], 'c': list},
{'n': 'HIDE_ALL_INSTRUCTIONS', 'c': bool},
{'n': 'HIDE_ALL_MINIFIGURES', 'c': bool},
{'n': 'HIDE_INDIVIDUAL_MINIFIGURES', 'c': bool},
{'n': 'HIDE_ALL_PARTS', 'c': bool},
{'n': 'HIDE_INDIVIDUAL_PARTS', 'c': bool},
{'n': 'HIDE_ALL_PROBLEMS_PARTS', 'e': 'BK_HIDE_MISSING_PARTS', 'c': bool},
{'n': 'HIDE_ALL_SETS', 'c': bool},
{'n': 'HIDE_ALL_STORAGES', 'c': bool},
@@ -47,8 +41,8 @@ CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'HIDE_TABLE_MISSING_PARTS', 'c': bool},
{'n': 'HIDE_TABLE_CHECKED_PARTS', 'c': bool},
{'n': 'HIDE_WISHES', 'c': bool},
{'n': 'MINIFIGURES_DEFAULT_ORDER', 'd': '"rebrickable_minifigures"."name" ASC'}, # noqa: E501
{'n': 'MINIFIGURES_FOLDER', 'd': 'data/minifigures'},
{'n': 'MINIFIGURES_DEFAULT_ORDER', 'd': '"combined"."name" ASC'}, # noqa: E501
{'n': 'MINIFIGURES_FOLDER', 'd': 'minifigures', 's': True},
{'n': 'MINIFIGURES_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
{'n': 'MINIFIGURES_PAGINATION_SIZE_MOBILE', 'd': 5, 'c': int},
{'n': 'MINIFIGURES_SERVER_SIDE_PAGINATION', 'c': bool},
@@ -56,7 +50,7 @@ CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'PARTS_SERVER_SIDE_PAGINATION', 'c': bool},
{'n': 'SETS_SERVER_SIDE_PAGINATION', 'c': bool},
{'n': 'PARTS_DEFAULT_ORDER', 'd': '"rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "combined"."spare" ASC'}, # noqa: E501
{'n': 'PARTS_FOLDER', 'd': 'data/parts'},
{'n': 'PARTS_FOLDER', 'd': 'parts', 's': True},
{'n': 'PARTS_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
{'n': 'PARTS_PAGINATION_SIZE_MOBILE', 'd': 5, 'c': int},
{'n': 'PROBLEMS_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
@@ -85,28 +79,22 @@ CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'REBRICKABLE_LINKS', 'e': 'LINKS', 'c': bool},
{'n': 'REBRICKABLE_PAGE_SIZE', 'd': 100, 'c': int},
{'n': 'RETIRED_SETS_FILE_URL', 'd': 'https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date'}, # noqa: E501
{'n': 'RETIRED_SETS_PATH', 'd': 'data/retired_sets.csv'},
{'n': 'RETIRED_SETS_PATH', 'd': './retired_sets.csv'},
{'n': 'SETS_DEFAULT_ORDER', 'd': '"rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC'}, # noqa: E501
{'n': 'SETS_FOLDER', 'd': 'data/sets'},
{'n': 'SETS_FOLDER', 'd': 'sets', 's': True},
{'n': 'SETS_CONSOLIDATION', 'd': False, 'c': bool},
{'n': 'SHOW_GRID_FILTERS', 'c': bool},
{'n': 'SHOW_GRID_SORT', 'c': bool},
{'n': 'SHOW_SETS_DUPLICATE_FILTER', 'd': True, 'c': bool},
{'n': 'SKIP_SPARE_PARTS', 'c': bool},
{'n': 'HIDE_SPARE_PARTS', 'c': bool},
{'n': 'SOCKET_NAMESPACE', 'd': 'bricksocket'},
{'n': 'SOCKET_PATH', 'd': '/bricksocket/'},
{'n': 'STORAGE_DEFAULT_ORDER', 'd': '"bricktracker_metadata_storages"."name" ASC'}, # noqa: E501
{'n': 'THEMES_FILE_URL', 'd': 'https://cdn.rebrickable.com/media/downloads/themes.csv.gz'}, # noqa: E501
{'n': 'THEMES_PATH', 'd': 'data/themes.csv'},
{'n': 'THEMES_PATH', 'd': './themes.csv'},
{'n': 'TIMEZONE', 'd': 'Etc/UTC'},
{'n': 'USE_REMOTE_IMAGES', 'c': bool},
{'n': 'WISHES_DEFAULT_ORDER', 'd': '"bricktracker_wishes"."rowid" DESC'},
{'n': 'STATISTICS_SHOW_CHARTS', 'd': True, 'c': bool},
{'n': 'STATISTICS_DEFAULT_EXPANDED', 'd': True, 'c': bool},
{'n': 'DARK_MODE', 'c': bool},
{'n': 'BADGE_ORDER_GRID', 'd': ['theme', 'year', 'parts', 'total_minifigures', 'owner'], 'c': list},
{'n': 'BADGE_ORDER_DETAIL', 'd': ['theme', 'tag', 'year', 'parts', 'instance_count', 'total_minifigures', 'total_missing', 'total_damaged', 'owner', 'storage', 'purchase_date', 'purchase_location', 'purchase_price', 'instructions', 'rebrickable', 'bricklink'], 'c': list},
{'n': 'SHOW_NOTES_GRID', 'd': False, 'c': bool},
{'n': 'SHOW_NOTES_DETAIL', 'd': True, 'c': bool},
]
+8 -37
View File
@@ -10,6 +10,7 @@ logger = logging.getLogger(__name__)
LIVE_CHANGEABLE_VARS: Final[List[str]] = [
'BK_BRICKLINK_LINKS',
'BK_DEFAULT_TABLE_PER_PAGE',
'BK_DESCRIPTION_BADGE_MAX_LENGTH',
'BK_INDEPENDENT_ACCORDIONS',
'BK_HIDE_ADD_SET',
'BK_HIDE_ADD_BULK_SET',
@@ -17,9 +18,7 @@ LIVE_CHANGEABLE_VARS: Final[List[str]] = [
'BK_ADMIN_DEFAULT_EXPANDED_SECTIONS',
'BK_HIDE_ALL_INSTRUCTIONS',
'BK_HIDE_ALL_MINIFIGURES',
'BK_HIDE_INDIVIDUAL_MINIFIGURES',
'BK_HIDE_ALL_PARTS',
'BK_HIDE_INDIVIDUAL_PARTS',
'BK_HIDE_ALL_PROBLEMS_PARTS',
'BK_HIDE_ALL_SETS',
'BK_HIDE_ALL_STORAGES',
@@ -28,7 +27,6 @@ LIVE_CHANGEABLE_VARS: Final[List[str]] = [
'BK_HIDE_TABLE_DAMAGED_PARTS',
'BK_HIDE_TABLE_MISSING_PARTS',
'BK_HIDE_TABLE_CHECKED_PARTS',
'BK_DISABLE_QUICK_ADD_INDIVIDUAL_PARTS',
'BK_HIDE_WISHES',
'BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP',
'BK_MINIFIGURES_PAGINATION_SIZE_MOBILE',
@@ -49,19 +47,12 @@ LIVE_CHANGEABLE_VARS: Final[List[str]] = [
'BK_SHOW_GRID_SORT',
'BK_SHOW_SETS_DUPLICATE_FILTER',
'BK_SKIP_SPARE_PARTS',
'BK_HIDE_SPARE_PARTS',
'BK_USE_REMOTE_IMAGES',
'BK_PEERON_DOWNLOAD_DELAY',
'BK_PEERON_MIN_IMAGE_SIZE',
'BK_REBRICKABLE_PAGE_SIZE',
'BK_STATISTICS_SHOW_CHARTS',
'BK_STATISTICS_DEFAULT_EXPANDED',
'BK_DARK_MODE',
# Badge order preferences
'BK_BADGE_ORDER_GRID',
'BK_BADGE_ORDER_DETAIL',
'BK_SHOW_NOTES_GRID',
'BK_SHOW_NOTES_DETAIL',
# Default ordering and formatting
'BK_INSTRUCTIONS_ALLOWED_EXTENSIONS',
'BK_MINIFIGURES_DEFAULT_ORDER',
@@ -71,8 +62,6 @@ LIVE_CHANGEABLE_VARS: Final[List[str]] = [
'BK_STORAGE_DEFAULT_ORDER',
'BK_WISHES_DEFAULT_ORDER',
# URL and Pattern Variables
# BrickLink minifigure links disabled - no ID mapping available
# 'BK_BRICKLINK_LINK_MINIFIGURE_PATTERN',
'BK_BRICKLINK_LINK_PART_PATTERN',
'BK_BRICKLINK_LINK_SET_PATTERN',
'BK_REBRICKABLE_IMAGE_NIL',
@@ -95,7 +84,6 @@ RESTART_REQUIRED_VARS: Final[List[str]] = [
'BK_AUTHENTICATION_KEY',
'BK_DATABASE_PATH',
'BK_DEBUG',
'BK_DISABLE_INDIVIDUAL_PARTS',
'BK_DISABLE_INDIVIDUAL_MINIFIGURES',
'BK_DOMAIN_NAME',
'BK_HOST',
@@ -121,20 +109,7 @@ class ConfigManager:
"""Manages live configuration updates for BrickTracker"""
def __init__(self):
# Check for .env in data folder first (v1.3+), fallback to root (backward compatibility)
data_env = Path('data/.env')
root_env = Path('.env')
if data_env.exists():
self.env_file_path = data_env
logger.info("Using configuration file: data/.env")
elif root_env.exists():
self.env_file_path = root_env
logger.info("Using configuration file: .env (consider migrating to data/.env)")
else:
# Default to data/.env for new installations
self.env_file_path = data_env
logger.info("Configuration file will be created at: data/.env")
self.env_file_path = Path('.env')
def get_current_config(self) -> Dict[str, Any]:
"""Get current configuration values for live-changeable variables"""
@@ -191,8 +166,8 @@ class ConfigManager:
def _cast_value(self, var_name: str, value: Any) -> Any:
"""Cast value to appropriate type based on variable name"""
# List variables (admin sections, badge order) - Check this FIRST before boolean check
if any(keyword in var_name.lower() for keyword in ['sections', 'badge_order']):
# List variables (admin sections) - Check this FIRST before boolean check
if 'sections' in var_name.lower():
if isinstance(value, str):
return [section.strip() for section in value.split(',') if section.strip()]
elif isinstance(value, list):
@@ -200,13 +175,13 @@ class ConfigManager:
else:
return []
# Integer variables (pagination sizes, delays, etc.) - Check BEFORE boolean check
if any(keyword in var_name.lower() for keyword in ['_size', '_page', 'delay', 'min_', 'per_page', 'page_size']):
if any(keyword in var_name.lower() for keyword in ['_size', '_page', 'delay', 'min_', 'per_page', 'page_size', '_length']):
try:
return int(value)
except (ValueError, TypeError):
return 0
# Boolean variables - More specific patterns to avoid conflicts
if any(keyword in var_name.lower() for keyword in ['hide_', 'disable_', 'server_side_pagination', '_links', 'random', 'skip_', 'show_', 'use_', '_consolidation', '_charts', '_expanded']):
if any(keyword in var_name.lower() for keyword in ['hide_', 'server_side_pagination', '_links', 'random', 'skip_', 'show_', 'use_', '_consolidation', '_charts', '_expanded']):
if isinstance(value, str):
return value.lower() in ('true', '1', 'yes', 'on')
return bool(value)
@@ -229,8 +204,6 @@ class ConfigManager:
def _update_env_file(self, var_name: str, value: Any) -> None:
"""Update the .env file with new value"""
if not self.env_file_path.exists():
# Ensure parent directory exists
self.env_file_path.parent.mkdir(parents=True, exist_ok=True)
self.env_file_path.touch()
# Read current .env content
@@ -333,11 +306,9 @@ class ConfigManager:
'BK_SETS_CONSOLIDATION': 'Enable set consolidation/grouping functionality',
'BK_SHOW_GRID_FILTERS': 'Show filter options on grids by default',
'BK_SHOW_GRID_SORT': 'Show sort options on grids by default',
'BK_SKIP_SPARE_PARTS': 'Skip importing spare parts when downloading sets from Rebrickable',
'BK_HIDE_SPARE_PARTS': 'Hide spare parts from parts lists (spare parts must still be in database)',
'BK_SKIP_SPARE_PARTS': 'Skip spare parts when importing sets',
'BK_USE_REMOTE_IMAGES': 'Use remote images from Rebrickable CDN instead of local',
'BK_STATISTICS_SHOW_CHARTS': 'Show collection growth charts on statistics page',
'BK_STATISTICS_DEFAULT_EXPANDED': 'Expand all statistics sections by default',
'BK_DARK_MODE': 'Enable dark mode theme'
'BK_STATISTICS_DEFAULT_EXPANDED': 'Expand all statistics sections by default'
}
return help_text.get(var_name, 'No help available for this variable')
+1 -5
View File
@@ -60,7 +60,7 @@ class BrickConfiguration(object):
if self.cast == bool and isinstance(value, str):
value = value.lower() in ('true', 'yes', '1')
# Static path fixup (legacy - only for paths with s: True flag)
# Static path fixup
if self.static_path and isinstance(value, str):
value = os.path.normpath(value)
@@ -70,10 +70,6 @@ class BrickConfiguration(object):
# Remove static prefix
value = value.removeprefix('static/')
# Normalize regular paths (not marked as static)
elif not self.static_path and isinstance(value, str) and ('FOLDER' in self.name or 'PATH' in self.name):
value = os.path.normpath(value)
# Type casting
if self.cast is not None:
self.value = self.cast(value)
+25 -81
View File
@@ -25,16 +25,17 @@ logger = logging.getLogger(__name__)
class IndividualMinifigure(RebrickableMinifigure):
# Queries
select_query: str = 'individual_minifigure/select/by_id'
light_query: str = 'individual_minifigure/select/light'
insert_query: str = 'individual_minifigure/insert'
# Delete an individual minifigure
# Delete an individual minifigure
def delete(self, /) -> None:
BrickSQL().executescript(
'individual_minifigure/delete',
'individual_minifigure/delete/individual_minifigure',
id=self.fields.id
)
# Import an individual minifigure into the database
# Import an individual minifigure into the database
def download(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
# Load the minifigure
if not self.load(socket, data, from_download=True):
@@ -66,10 +67,6 @@ class IndividualMinifigure(RebrickableMinifigure):
)
self.fields.purchase_location = purchase_location.fields.id if purchase_location else None
# Save purchase date and price
self.fields.purchase_date = data.get('purchase_date', None)
self.fields.purchase_price = data.get('purchase_price', None)
# Save quantity and description
self.fields.quantity = int(data.get('quantity', 1))
self.fields.description = data.get('description', '')
@@ -140,6 +137,7 @@ class IndividualMinifigure(RebrickableMinifigure):
# Download parts (elements) for this individual minifigure
def download_parts(self, socket: 'BrickSocket', /) -> bool:
"""Download minifigure parts using get_minifig_elements()"""
try:
# Check if we have cached parts data from load()
if hasattr(self, '_cached_parts_response'):
@@ -182,30 +180,20 @@ class IndividualMinifigure(RebrickableMinifigure):
from .rebrickable_part import RebrickablePart
if 'results' in response:
logger.debug('Processing {count} parts for minifigure {figure}'.format(
count=len(response["results"]),
figure=self.fields.figure
))
logger.debug(f'Processing {len(response["results"])} parts for minifigure {self.fields.figure}')
for idx, result in enumerate(response['results']):
part_num = result['part']['part_num']
color_id = result['color']['id']
logger.debug(
'Part {current}/{total}: {part_num} (color: {color_id}, quantity: {quantity})'.format(
current=idx+1,
total=len(response["results"]),
part_num=part_num,
color_id=color_id,
quantity=result["quantity"]
)
f'Part {idx+1}/{len(response["results"])}: {part_num} '
f'(color: {color_id}, quantity: {result["quantity"]})'
)
# Insert rebrickable part data first
part_data = RebrickablePart.from_rebrickable(result)
logger.debug('Rebrickable part data keys: {keys}'.format(
keys=list(part_data.keys())
))
logger.debug(f'Rebrickable part data keys: {list(part_data.keys())}')
# Insert into rebrickable_parts if not exists
BrickSQL().execute(
@@ -228,10 +216,7 @@ class IndividualMinifigure(RebrickableMinifigure):
).download()
except Exception as e:
logger.warning(
'Could not download image for part {part_num}: {error}'.format(
part_num=part_num,
error=e
)
f'Could not download image for part {part_num}: {e}'
)
# Insert into bricktracker_individual_minifigure_parts
@@ -244,9 +229,7 @@ class IndividualMinifigure(RebrickableMinifigure):
'element': result.get('element_id'),
'rebrickable_inventory': result['id'],
}
logger.debug('Individual part params: {params}'.format(
params=individual_part_params
))
logger.debug(f'Individual part params: {individual_part_params}')
BrickSQL().execute(
'individual_minifigure/part/insert',
@@ -254,13 +237,9 @@ class IndividualMinifigure(RebrickableMinifigure):
commit=False,
)
logger.debug('Successfully inserted all {count} parts'.format(
count=len(response["results"])
))
logger.debug(f'Successfully inserted all {len(response["results"])} parts')
else:
logger.warning('No results in parts response for minifigure {figure}'.format(
figure=self.fields.figure
))
logger.warning(f'No results in parts response for minifigure {self.fields.figure}')
# Clean up cached data
if hasattr(self, '_cached_parts_response'):
@@ -280,6 +259,7 @@ class IndividualMinifigure(RebrickableMinifigure):
# Insert the individual minifigure from Rebrickable
def insert_rebrickable_loose(self, /) -> None:
"""Insert rebrickable minifigure data (without set association)"""
# Insert the Rebrickable minifigure to the database
# Note: We override the parent's insert_rebrickable since we don't have a brickset
from .rebrickable_image import RebrickableImage
@@ -309,15 +289,10 @@ class IndividualMinifigure(RebrickableMinifigure):
BrickSet(), # Dummy set - not used since minifigure takes priority
minifigure=self,
).download()
logger.debug('Downloaded image for individual minifigure {figure}'.format(
figure=self.fields.figure
))
logger.debug(f'Downloaded image for individual minifigure {self.fields.figure}')
except Exception as e:
logger.warning(
'Could not download image for individual minifigure {figure}: {error}'.format(
figure=self.fields.figure,
error=e
)
f'Could not download image for individual minifigure {self.fields.figure}: {e}'
)
# Load the minifigure from Rebrickable
@@ -389,7 +364,7 @@ class IndividualMinifigure(RebrickableMinifigure):
figure,
**minifig_params
).read())
self.fields.name = minifig_response.get('name', "Minifigure {figure}".format(figure=figure))
self.fields.name = minifig_response.get('name', f"Minifigure {figure}")
# Use the minifig image from get_minifig() - this is the assembled minifig
self.fields.image = minifig_response.get('set_img_url')
@@ -401,10 +376,8 @@ class IndividualMinifigure(RebrickableMinifigure):
self.fields.number = 0
except Exception as e:
logger.warning('Could not fetch minifigure name: {error}'.format(
error=e
))
self.fields.name = "Minifigure {figure}".format(figure=figure)
logger.warning(f'Could not fetch minifigure name: {e}')
self.fields.name = f"Minifigure {figure}"
# Try to extract number anyway
try:
self.fields.number = int(figure.split('-')[1])
@@ -421,29 +394,7 @@ class IndividualMinifigure(RebrickableMinifigure):
# Store the parts data for later use in download
self._cached_parts_response = response
else:
raise NotFoundException('Minifigure {figure} has no parts in Rebrickable'.format(
figure=figure
))
# Download minifigure image during preview if not using remote images
if not from_download and not current_app.config['USE_REMOTE_IMAGES'] and self.fields.image:
from .rebrickable_image import RebrickableImage
from .set import BrickSet
try:
RebrickableImage(
BrickSet(),
minifigure=self,
).download()
logger.debug('Downloaded preview image for minifigure {figure}'.format(
figure=self.fields.figure
))
except Exception as e:
logger.warning(
'Could not download preview image for minifigure {figure}: {error}'.format(
figure=self.fields.figure,
error=e
)
)
raise NotFoundException(f'Minifigure {figure} has no parts in Rebrickable')
socket.emit('MINIFIGURE_LOADED', self.short(
from_download=from_download
@@ -485,7 +436,7 @@ class IndividualMinifigure(RebrickableMinifigure):
'figure': self.fields.figure,
}
# Select an individual minifigure by ID
# Select an individual minifigure by ID
def select_by_id(self, id: str, /) -> Self:
# Save the ID parameter
self.fields.id = id
@@ -493,11 +444,11 @@ class IndividualMinifigure(RebrickableMinifigure):
# Import status list here to get metadata columns
from .set_status_list import BrickSetStatusList
# Pass metadata columns to the query (using set tables which now handle all entities)
# Pass metadata columns to the query with correct table names for individual minifigures
context = {
'owners': BrickSetOwnerList.as_columns() if BrickSetOwnerList.list() else '',
'statuses': BrickSetStatusList.as_columns(all=True) if BrickSetStatusList.list(all=True) else '',
'tags': BrickSetTagList.as_columns() if BrickSetTagList.list() else '',
'owners': ', ' + BrickSetOwnerList.as_columns(table='bricktracker_individual_minifigure_owners') if BrickSetOwnerList.list() else '',
'statuses': ', ' + BrickSetStatusList.as_columns(table='bricktracker_individual_minifigure_statuses', all=True) if BrickSetStatusList.list(all=True) else '',
'tags': ', ' + BrickSetTagList.as_columns(table='bricktracker_individual_minifigure_tags') if BrickSetTagList.list() else '',
}
if not self.select(**context):
@@ -513,13 +464,6 @@ class IndividualMinifigure(RebrickableMinifigure):
def url(self, /) -> str:
return url_for('individual_minifigure.details', id=self.fields.id)
# String representation for debugging
def __repr__(self, /) -> str:
figure = getattr(self.fields, 'figure', 'unknown')
name = getattr(self.fields, 'name', 'Unknown')
qty = getattr(self.fields, 'quantity', 0)
return f'<IndividualMinifigure {figure} "{name}" qty:{qty}>'
# URL for updating quantity
def url_for_quantity(self, /) -> str:
return url_for('individual_minifigure.update_quantity', id=self.fields.id)
+10 -31
View File
@@ -3,9 +3,6 @@ from typing import Self
from .individual_minifigure import IndividualMinifigure
from .record_list import BrickRecordList
from .set_owner_list import BrickSetOwnerList
from .set_status_list import BrickSetStatusList
from .set_tag_list import BrickSetTagList
logger = logging.getLogger(__name__)
@@ -13,36 +10,28 @@ logger = logging.getLogger(__name__)
# Individual minifigures list
class IndividualMinifigureList(BrickRecordList[IndividualMinifigure]):
# Queries
all_query: str = 'individual_minifigure/list/all'
instances_by_figure_query: str = 'individual_minifigure/select/instances_by_figure'
using_storage_query: str = 'individual_minifigure/list/using_storage'
using_purchase_location_query: str = 'individual_minifigure/list/using_purchase_location'
without_storage_query: str = 'individual_minifigure/list/without_storage'
def __init__(self, /):
super().__init__()
# Load all individual minifigures
def all(self, /) -> Self:
# Prepare context with metadata columns
context = {
'owners': BrickSetOwnerList.as_columns() if BrickSetOwnerList.list() else 'NULL AS "no_owners"',
'statuses': BrickSetStatusList.as_columns(all=True) if BrickSetStatusList.list(all=True) else 'NULL AS "no_statuses"',
'tags': BrickSetTagList.as_columns() if BrickSetTagList.list() else 'NULL AS "no_tags"',
}
self.list(override_query=self.all_query, **context)
return self
# Load all individual instances of a specific minifigure figure
def instances_by_figure(self, figure: str, /) -> Self:
# Save the figure parameter
self.fields.figure = figure
# Prepare context with metadata columns (using consolidated metadata tables)
# Import metadata lists to get columns
from .set_owner_list import BrickSetOwnerList
from .set_status_list import BrickSetStatusList
from .set_tag_list import BrickSetTagList
# Prepare context with metadata columns
context = {
'owners': BrickSetOwnerList.as_columns() if BrickSetOwnerList.list() else 'NULL AS "no_owners"',
'statuses': BrickSetStatusList.as_columns(all=True) if BrickSetStatusList.list(all=True) else 'NULL AS "no_statuses"',
'tags': BrickSetTagList.as_columns() if BrickSetTagList.list() else 'NULL AS "no_tags"',
'owners': BrickSetOwnerList.as_columns(table='bricktracker_individual_minifigure_owners') if BrickSetOwnerList.list() else 'NULL AS "no_owners"',
'statuses': BrickSetStatusList.as_columns(table='bricktracker_individual_minifigure_statuses', all=True) if BrickSetStatusList.list(all=True) else 'NULL AS "no_statuses"',
'tags': BrickSetTagList.as_columns(table='bricktracker_individual_minifigure_tags') if BrickSetTagList.list() else 'NULL AS "no_tags"',
}
# Load the instances from the database
@@ -60,16 +49,6 @@ class IndividualMinifigureList(BrickRecordList[IndividualMinifigure]):
return self
# Load all individual minifigures using a specific purchase location
def using_purchase_location(self, purchase_location: 'BrickSetPurchaseLocation', /) -> Self:
# Save the purchase location parameter
self.fields.purchase_location = purchase_location.fields.id
# Load the minifigures from the database
self.list(override_query=self.using_purchase_location_query)
return self
# Load all individual minifigures without storage
def without_storage(self, /) -> Self:
# Load minifigures with no storage
-917
View File
@@ -1,917 +0,0 @@
import logging
import os
import traceback
from typing import Any, Self, TYPE_CHECKING
from urllib.parse import urlparse
from uuid import uuid4
from flask import current_app, url_for
import requests
from shutil import copyfileobj
from .exceptions import NotFoundException, DatabaseException, ErrorException
from .record import BrickRecord
from .set_owner_list import BrickSetOwnerList
from .set_purchase_location_list import BrickSetPurchaseLocationList
from .set_storage_list import BrickSetStorageList
from .set_tag_list import BrickSetTagList
from .sql import BrickSQL
if TYPE_CHECKING:
from .socket import BrickSocket
logger = logging.getLogger(__name__)
# Individual part (standalone, not associated with a set or minifigure)
class IndividualPart(BrickRecord):
# Queries
select_query: str = 'individual_part/select/by_id'
insert_query: str = 'individual_part/insert'
update_query: str = 'individual_part/update'
def __init__(
    self,
    /,
    *,
    record: Any | None = None
):
    """Create an individual part, optionally hydrated from a database record."""
    super().__init__()

    # Nothing more to do when no record was supplied
    if record is None:
        return

    # Copy the record's values onto this instance
    self.ingest(record)
# Select a specific individual part by UUID
def select_by_id(self, id: str, /) -> Self:
    """Load one individual part by its UUID; raise NotFoundException if absent."""
    # Local imports avoid circular dependencies at module load time
    from .set_owner_list import BrickSetOwnerList
    from .set_status_list import BrickSetStatusList
    from .set_tag_list import BrickSetTagList

    self.fields.id = id

    # Metadata columns injected into the select query
    metadata = {
        'owners': BrickSetOwnerList.as_columns(),
        'statuses': BrickSetStatusList.as_columns(all=True),
        'tags': BrickSetTagList.as_columns(),
    }

    found = self.select(override_query=self.select_query, **metadata)
    if not found:
        raise NotFoundException(
            'Individual part with id "{id}" not found'.format(id=id)
        )

    return self
# Delete an individual part
def delete(self, /) -> None:
    """Remove this individual part (and related rows) from the database."""
    database = BrickSQL()
    # The delete script covers every table referencing this part's id
    database.executescript('individual_part/delete', id=self.fields.id)
    database.commit()
# Generate HTML ID for form elements
def html_id(self, prefix: str | None = None, /) -> str:
    """Build a DOM id unique to this part instance, optionally prefixed."""
    pieces: list[str] = ['individual-part'] if prefix is None else ['individual-part', prefix]
    pieces += [self.fields.part, str(self.fields.color), self.fields.id]
    return '-'.join(pieces)
# URL for quantity update
def url_for_quantity(self, /) -> str:
    """Route updating this part's quantity."""
    return url_for(
        'individual_part.update_quantity',
        id=self.fields.id,
    )
# URL for description update
def url_for_description(self, /) -> str:
    """Route updating this part's description."""
    return url_for(
        'individual_part.update_description',
        id=self.fields.id,
    )
# URL for problem (missing/damaged) update
def url_for_problem(self, problem_type: str, /) -> str:
    """Route updating a problem counter ('missing' or 'damaged') for this part.

    Raises ValueError on any other problem type.
    """
    endpoints = {
        'missing': 'individual_part.update_missing',
        'damaged': 'individual_part.update_damaged',
    }
    if problem_type not in endpoints:
        raise ValueError(f'Invalid problem type: {problem_type}')
    return url_for(endpoints[problem_type], id=self.fields.id)
# URL for checked status update
def url_for_checked(self, /) -> str:
    """Route updating this part's checked status."""
    return url_for(
        'individual_part.update_checked',
        id=self.fields.id,
    )
# URL for purchase date update
def url_for_purchase_date(self, /) -> str:
    """Route updating this part's purchase date."""
    return url_for(
        'individual_part.update_purchase_date',
        id=self.fields.id,
    )
# URL for purchase price update
def url_for_purchase_price(self, /) -> str:
    """Route updating this part's purchase price."""
    return url_for(
        'individual_part.update_purchase_price',
        id=self.fields.id,
    )
# URL for this part's detail page
def url(self, /) -> str:
    """Route of this part's detail page."""
    return url_for(
        'individual_part.details',
        id=self.fields.id,
    )
def url_for_delete(self, /) -> str:
    """Route deleting this part."""
    return url_for(
        'individual_part.delete_part',
        id=self.fields.id,
    )
def url_for_image(self, /) -> str:
    """URL of this part's image: remote CDN URL or locally cached static file,
    depending on the USE_REMOTE_IMAGES configuration flag."""
    image = getattr(self.fields, 'image', None)

    # Remote mode: hand back the stored URL, or the nil placeholder
    if current_app.config.get('USE_REMOTE_IMAGES', False):
        if image:
            return image
        return current_app.config.get('REBRICKABLE_IMAGE_NIL', '')

    # Local mode: derive the cached file name from the URL's basename
    from .rebrickable_image import RebrickableImage

    if image:
        image_id, _ = os.path.splitext(os.path.basename(urlparse(image).path))
        if image_id:
            return RebrickableImage.static_url(image_id, 'PARTS_FOLDER')

    return RebrickableImage.static_url(RebrickableImage.nil_name(), 'PARTS_FOLDER')
# String representation for debugging
def __repr__(self, /) -> str:
    """Compact debugging representation of this individual part."""
    return '<IndividualPart {} color:{} qty:{}>'.format(
        getattr(self.fields, 'part', 'unknown'),
        getattr(self.fields, 'color', 'unknown'),
        getattr(self.fields, 'quantity', 0),
    )
# Get or fetch color information from rebrickable_colors table
@staticmethod
def get_or_fetch_color(color_id: int, /) -> dict[str, Any] | None:
    """Return color info for a Rebrickable color id, using the local cache.

    Looks up the rebrickable_colors table first; on a miss, fetches the color
    from the Rebrickable API, caches it (including the BrickLink mapping),
    and returns it. Returns None if the API call fails.
    """
    sql = BrickSQL()
    # Check if color exists in cache
    result = sql.fetchone('rebrickable_colors/select/by_color_id', parameters={'color_id': color_id})
    if result:
        # Color found in cache
        # NOTE(review): tuple indices assume the select query's column order is
        # (color_id, name, rgb, is_trans, bricklink_color_id, bricklink_color_name) — verify against the query file
        return {
            'color_id': result[0],
            'name': result[1],
            'rgb': result[2],
            'is_trans': result[3],
            'bricklink_color_id': result[4],
            'bricklink_color_name': result[5]
        }
    # Color not in cache, fetch from API
    try:
        import rebrick
        import json
        rebrick.init(current_app.config['REBRICKABLE_API_KEY'])
        color_response = rebrick.lego.get_color(color_id)
        color_data = json.loads(color_response.read())
        # Extract BrickLink color info (optional in the API payload)
        bricklink_color_id = None
        bricklink_color_name = None
        if 'external_ids' in color_data and 'BrickLink' in color_data['external_ids']:
            bricklink_info = color_data['external_ids']['BrickLink']
            if 'ext_ids' in bricklink_info and bricklink_info['ext_ids']:
                bricklink_color_id = bricklink_info['ext_ids'][0]
            if 'ext_descrs' in bricklink_info and bricklink_info['ext_descrs']:
                # ext_descrs is a list of lists; the first entry's first item is the name
                bricklink_color_name = bricklink_info['ext_descrs'][0][0] if bricklink_info['ext_descrs'][0] else None
        # Store in cache so subsequent lookups skip the API
        sql.execute('rebrickable_colors/insert', parameters={
            'color_id': color_data['id'],
            'name': color_data['name'],
            'rgb': color_data.get('rgb'),
            'is_trans': color_data.get('is_trans', False),
            'bricklink_color_id': bricklink_color_id,
            'bricklink_color_name': bricklink_color_name
        })
        sql.connection.commit()
        logger.info('Cached color {color_id} ({color_name}) with BrickLink ID {bricklink_id}'.format(
            color_id=color_id,
            color_name=color_data["name"],
            bricklink_id=bricklink_color_id
        ))
        return {
            'color_id': color_data['id'],
            'name': color_data['name'],
            'rgb': color_data.get('rgb'),
            'is_trans': color_data.get('is_trans', False),
            'bricklink_color_id': bricklink_color_id,
            'bricklink_color_name': bricklink_color_name
        }
    except Exception as e:
        # Best-effort: a failed fetch is logged, not raised
        logger.warning('Could not fetch color {color_id} from API: {error}'.format(
            color_id=color_id,
            error=e
        ))
        return None
# Download image for this part
def download_image(self, image_url: str, /, *, image_filename: str | None = None) -> None:
    """Download a part image into the configured PARTS_FOLDER.

    The image is saved as '<image_id>.jpg' where image_id is the provided
    image_filename or, failing that, the basename of the URL. The download
    is skipped when the URL is empty, no id can be derived, or the target
    file already exists. Failures are logged, never raised.
    """
    if not image_url:
        return
    # Use provided filename or extract from URL
    if image_filename:
        image_id = image_filename
    else:
        image_id, _ = os.path.splitext(os.path.basename(urlparse(image_url).path))
    if not image_id:
        return
    # Build path (same pattern as RebrickableImage)
    parts_folder = current_app.config['PARTS_FOLDER']
    extension = 'jpg'  # Everything is saved as jpg
    # If folder is an absolute path (starts with /), use it directly
    # Otherwise, make it relative to app root (current_app.root_path)
    if parts_folder.startswith('/'):
        base_path = parts_folder
    else:
        base_path = os.path.join(current_app.root_path, parts_folder)
    path = os.path.join(base_path, f'{image_id}.{extension}')
    # Avoid downloading if file exists
    if os.path.exists(path):
        return
    # Create directory if it doesn't exist
    os.makedirs(os.path.dirname(path), exist_ok=True)
    # Download the image
    try:
        # Fix: bound the request with a timeout (the original could hang
        # forever on a stalled server) and close the response via the
        # context manager so the connection is not leaked
        with requests.get(image_url, stream=True, timeout=30) as response:
            if response.ok:
                with open(path, 'wb') as f:
                    copyfileobj(response.raw, f)
                logger.info('Downloaded image to {path}'.format(path=path))
    except Exception as e:
        logger.warning('Could not download image from {url}: {error}'.format(
            url=image_url,
            error=e
        ))
# Load available colors for a part
def load_colors(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
    """Fetch the colors available for a part from Rebrickable and emit them.

    Reads the part number from data['part'], emits progress updates and a
    final 'PART_COLORS_LOADED' event over the socket. When remote images are
    disabled, also downloads one image per color. Returns True on success;
    on any failure, calls socket.fail() and returns False.
    """
    # Check if individual parts are disabled
    if current_app.config.get('DISABLE_INDIVIDUAL_PARTS', False):
        socket.fail(message='Individual parts system is disabled.')
        return False
    try:
        # Extract part number
        part_num = str(data.get('part', '')).strip()
        if not part_num:
            raise ErrorException('Part number is required')
        # Fetch available colors from Rebrickable
        import rebrick
        import json
        rebrick.init(current_app.config['REBRICKABLE_API_KEY'])
        # Setup progress tracking
        socket.progress_count = 0
        socket.progress_total = 2 # Fetch part info + fetch colors
        try:
            # Get part information for the name
            socket.auto_progress(message='Fetching part information')
            part_response = rebrick.lego.get_part(part_num)
            part_data = json.loads(part_response.read())
            part_name = part_data.get('name', part_num)
            # Get all available colors for this part
            socket.auto_progress(message='Fetching available colors')
            colors_response = rebrick.lego.get_part_colors(part_num)
            colors_data = json.loads(colors_response.read())
            # Extract the results
            colors = colors_data.get('results', [])
            if not colors:
                raise ErrorException(f'No colors found for part {part_num}')
            # Download images locally if USE_REMOTE_IMAGES is False
            if not current_app.config.get('USE_REMOTE_IMAGES', False):
                # Add image downloads to progress
                socket.progress_total += len(colors)
                for color in colors:
                    image_url = color.get('part_img_url', '')
                    element_id = color.get('elements', [])
                    # Use first element_id if available, otherwise extract from URL
                    if element_id and len(element_id) > 0:
                        image_filename = str(element_id[0])
                    else:
                        # Fallback: extract from URL
                        image_filename = None
                        if image_url:
                            image_filename, _ = os.path.splitext(os.path.basename(urlparse(image_url).path))
                    if image_url and image_filename:
                        socket.auto_progress(message='Downloading image for {color}'.format(
                            color=color.get("color_name", "color")
                        ))
                        try:
                            self.download_image(image_url, image_filename=image_filename)
                        except Exception as e:
                            # Image download failures are non-fatal: log and continue
                            logger.warning('Could not download image for part {part_num} color {color_id}: {error}'.format(
                                part_num=part_num,
                                color_id=color.get("color_id"),
                                error=e
                            ))
            # Emit the part colors loaded event
            logger.info('Emitting {count} colors for part {part_num} ({part_name})'.format(
                count=len(colors),
                part_num=part_num,
                part_name=part_name
            ))
            socket.emit(
                'PART_COLORS_LOADED',
                {
                    'part': part_num,
                    'part_name': part_name,
                    'colors': colors,
                    'count': len(colors)
                }
            )
            logger.info('Successfully loaded {count} colors for part {part_num}'.format(
                count=len(colors),
                part_num=part_num
            ))
            return True
        except Exception as e:
            error_msg = str(e)
            # Provide helpful error message for printed/decorated parts
            # NOTE(review): matching '404' in the message is a heuristic for
            # a not-found API response — confirm against rebrick's error format
            if '404' in error_msg or 'Not Found' in error_msg:
                # Check if this might be a printed part (has letters/pattern code)
                base_part = ''.join(c for c in part_num if c.isdigit())
                if base_part and base_part != part_num:
                    raise ErrorException(
                        'Part {part_num} not found in Rebrickable. This appears to be a printed/decorated part. '
                        'Try searching for the base part number: {base_part}'.format(
                            part_num=part_num,
                            base_part=base_part
                        )
                    )
                else:
                    raise ErrorException(
                        'Part {part_num} not found in Rebrickable. '
                        'Please verify the part number is correct.'.format(
                            part_num=part_num
                        )
                    )
            else:
                raise ErrorException(
                    'Could not fetch colors for part {part_num}: {error}'.format(
                        part_num=part_num,
                        error=error_msg
                    )
                )
    except Exception as e:
        # All failures surface to the client through socket.fail()
        error_msg = str(e)
        socket.fail(message=f'Could not load part colors: {error_msg}')
        # Only log tracebacks for unexpected (non-domain) exceptions
        if not isinstance(e, (NotFoundException, ErrorException)):
            logger.debug(traceback.format_exc())
        return False
# Add a new individual part
def add(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
# Check if individual parts are disabled
if current_app.config.get('DISABLE_INDIVIDUAL_PARTS', False):
socket.fail(message='Individual parts system is disabled.')
return False
try:
# Reset progress
socket.progress_count = 0
socket.progress_total = 3
socket.auto_progress(message='Validating part and color')
# Extract data
part_num = str(data.get('part', '')).strip()
color_id = int(data.get('color', -1))
quantity = int(data.get('quantity', 1))
if not part_num:
raise ErrorException('Part number is required')
if color_id < 0:
raise ErrorException('Valid color ID is required')
if quantity <= 0:
raise ErrorException('Quantity must be greater than 0')
# Check if color info was pre-loaded (from load_colors)
color_data = data.get('color_info', None)
part_name = data.get('part_name', None)
# Validate part+color exists in rebrickable_parts
# If not, fetch from Rebrickable or use pre-loaded data and insert
sql = BrickSQL()
result = sql.fetchone('rebrickable_parts/check_exists', parameters={'part': part_num, 'color_id': color_id})
exists = result[0] > 0
# Store image URL for downloading later
image_url = None
if not exists:
# Fetch full color information (with BrickLink mapping)
socket.auto_progress(message='Fetching color information')
full_color_info = IndividualPart.get_or_fetch_color(color_id)
# If we have pre-loaded color data, use it; otherwise fetch from Rebrickable
if color_data and part_name:
# Use pre-loaded data from get_part_colors() response
socket.auto_progress(message='Using cached part info')
image_url = color_data.get('part_img_url', '')
# Extract image_id from element_id or URL
element_ids = color_data.get('elements', [])
if element_ids and len(element_ids) > 0:
image_id = str(element_ids[0])
elif image_url:
image_id, _ = os.path.splitext(os.path.basename(urlparse(image_url).path))
else:
image_id = None
# Insert into rebrickable_parts using the pre-loaded data
sql.execute('rebrickable_parts/insert_with_preloaded_data', parameters={
'part': part_num,
'color_id': color_id,
'color_name': color_data.get('color_name', ''),
'color_rgb': full_color_info.get('rgb') if full_color_info else None,
'color_transparent': full_color_info.get('is_trans') if full_color_info else None,
'bricklink_color_id': full_color_info.get('bricklink_color_id') if full_color_info else None,
'bricklink_color_name': full_color_info.get('bricklink_color_name') if full_color_info else None,
'name': part_name,
'image': image_url,
'image_id': image_id,
'url': current_app.config['REBRICKABLE_LINK_PART_PATTERN'].format(part=part_num, color=color_id)
})
else:
# Fetch from Rebrickable (fallback for old workflow)
socket.auto_progress(message='Fetching part info from Rebrickable')
import rebrick
import json
# Initialize rebrick with API key
rebrick.init(current_app.config['REBRICKABLE_API_KEY'])
try:
# Get part information
part_info = json.loads(rebrick.lego.get_part(part_num).read())
# Get color information (this also caches it in rebrickable_colors)
# full_color_info already fetched above, but get again to be sure
if not full_color_info:
full_color_info = IndividualPart.get_or_fetch_color(color_id)
# Get part+color specific info (for the image and element_id)
part_color_info = json.loads(rebrick.lego.get_part_color(part_num, color_id).read())
# Get image URL
image_url = part_color_info.get('part_img_url', part_info.get('part_img_url', ''))
# Extract image_id from element_ids or URL
element_ids = part_color_info.get('elements', [])
if element_ids and len(element_ids) > 0:
image_id = str(element_ids[0])
elif image_url:
image_id, _ = os.path.splitext(os.path.basename(urlparse(image_url).path))
else:
image_id = None
# Insert into rebrickable_parts with BrickLink color info
sql.execute('rebrickable_parts/insert_with_preloaded_data', parameters={
'part': part_info['part_num'],
'color_id': full_color_info['color_id'] if full_color_info else color_id,
'color_name': full_color_info['name'] if full_color_info else '',
'color_rgb': full_color_info['rgb'] if full_color_info else None,
'color_transparent': full_color_info['is_trans'] if full_color_info else None,
'bricklink_color_id': full_color_info.get('bricklink_color_id') if full_color_info else None,
'bricklink_color_name': full_color_info.get('bricklink_color_name') if full_color_info else None,
'name': part_info['name'],
'image': image_url,
'image_id': image_id,
'url': part_info['part_url']
})
except Exception as e:
error_msg = str(e)
# Provide helpful error message for printed/decorated parts
if '404' in error_msg or 'Not Found' in error_msg:
base_part = ''.join(c for c in part_num if c.isdigit())
if base_part and base_part != part_num:
raise ErrorException(
f'Part {part_num} with color {color_id} not found in Rebrickable. '
f'This appears to be a printed/decorated part. '
f'Try using the base part number: {base_part}'
)
else:
raise ErrorException(
f'Part {part_num} with color {color_id} not found in Rebrickable. '
f'Please verify the part number is correct.'
)
else:
raise ErrorException(
f'Part {part_num} with color {color_id} not found in Rebrickable: {error_msg}'
)
else:
# Part already exists in rebrickable_parts, get the image URL
result = sql.fetchone('rebrickable_parts/select/image_by_part_color', parameters={'part': part_num, 'color_id': color_id})
if result and result[0]:
image_url = result[0]
# Generate UUID and insert individual part
socket.auto_progress(message='Adding part to collection')
part_id = str(uuid4())
# Get storage and purchase location
storage = BrickSetStorageList.get(
data.get('storage', ''),
allow_none=True
)
purchase_location = BrickSetPurchaseLocationList.get(
data.get('purchase_location', ''),
allow_none=True
)
# Set fields
self.fields.id = part_id
self.fields.part = part_num
self.fields.color = color_id
self.fields.quantity = quantity
self.fields.missing = 0
self.fields.damaged = 0
self.fields.checked = 0
self.fields.description = data.get('description', '')
self.fields.lot_id = None # Single parts are not in a lot
self.fields.storage = storage.fields.id if storage else None
self.fields.purchase_location = purchase_location.fields.id if purchase_location else None
self.fields.purchase_date = data.get('purchase_date', None)
self.fields.purchase_price = data.get('purchase_price', None)
# Insert into database
self.insert(commit=False, no_defer=True)
# Save owners
owners: list[str] = list(data.get('owners', []))
for owner_id in owners:
owner = BrickSetOwnerList.get(owner_id)
owner.update_individual_part_state(self, state=True)
# Save tags
tags: list[str] = list(data.get('tags', []))
for tag_id in tags:
tag = BrickSetTagList.get(tag_id)
tag.update_individual_part_state(self, state=True)
# Commit
sql.connection.commit()
# Download image if we have a URL
if image_url:
try:
self.download_image(image_url)
except Exception as e:
# Don't fail the whole operation if image download fails
logger.warning('Could not download image for part {part_num} color {color_id}: {error}'.format(
part_num=part_num,
color_id=color_id,
error=e
))
# Get color name for success message
color_name = 'Unknown'
if color_data and color_data.get('color_name'):
color_name = color_data.get('color_name')
elif full_color_info and full_color_info.get('name'):
color_name = full_color_info.get('name')
# Generate link to part details page
part_url = url_for('part.details', part=part_num, color=color_id)
socket.complete(
message=f'Successfully added part {part_num} in {color_name} (<a href="{part_url}">View details</a>)'
)
return True
except Exception as e:
error_msg = str(e)
if 'Individual parts system is disabled' in error_msg:
socket.fail(message=error_msg)
else:
socket.fail(
message=f'Could not add individual part: {error_msg}'
)
if not isinstance(e, (NotFoundException, ErrorException)):
logger.debug(traceback.format_exc())
return False
# Create multiple individual parts (bulk mode - no lot)
def create_bulk(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
    """
    Create multiple individual parts without creating a lot.

    Each cart entry becomes its own individual part record (lot_id is
    always None). Storage, purchase info, owners and tags from the
    top-level payload are applied identically to every part.

    Progress is streamed to the client through the socket; on success
    socket.complete() is sent and True is returned, on failure
    socket.fail() is sent and False is returned.

    Expected data format:
    {
        'cart': [
            {
                'part': '3001',
                'part_name': 'Brick 2 x 4',
                'color_id': 1,
                'color_name': 'White',
                'quantity': 10,
                'color_info': {...}
            },
            ...
        ],
        'storage': 'storage_id',
        'purchase_location': 'purchase_location_id',
        'purchase_date': timestamp,
        'purchase_price': 0.0,
        'owners': ['owner_id1', ...],
        'tags': ['tag_id1', ...]
    }
    """
    try:
        # Validate cart data
        cart = data.get('cart', [])
        if not cart or not isinstance(cart, list):
            raise ErrorException('Cart is empty or invalid')
        socket.auto_progress(
            message=f'Adding {len(cart)} individual parts',
            increment_total=True
        )
        # Get storage (local import to avoid a circular import at module load)
        from .set_list import BrickSetStorageList, BrickSetPurchaseLocationList, BrickSetOwnerList, BrickSetTagList
        storage = BrickSetStorageList.get(
            data.get('storage', ''),
            allow_none=True
        )
        storage_id = storage.fields.id if storage else None
        # Get purchase location
        purchase_location = BrickSetPurchaseLocationList.get(
            data.get('purchase_location', ''),
            allow_none=True
        )
        purchase_location_id = purchase_location.fields.id if purchase_location else None
        # Get purchase info (applied to every part in the cart)
        purchase_date = data.get('purchase_date', None)
        purchase_price = data.get('purchase_price', None)
        # Get owners and tags
        owners: list[str] = list(data.get('owners', []))
        tags: list[str] = list(data.get('tags', []))
        # Add all parts from cart
        parts_added = 0
        for idx, cart_item in enumerate(cart):
            part_num = cart_item.get('part')
            color_id = cart_item.get('color_id')
            quantity = cart_item.get('quantity', 1)
            color_info = cart_item.get('color_info', {})
            socket.auto_progress(
                message=f'Adding part {idx + 1}/{len(cart)}: {part_num} in {cart_item.get("color_name", "unknown color")}',
                increment_total=True
            )
            # Create individual part with no lot_id
            part_uuid = str(uuid4())
            # Ensure color exists and get full color info (including RGB)
            full_color_info = IndividualPart.get_or_fetch_color(color_id)
            # Insert the part
            # NOTE(review): a fresh BrickSQL() per iteration — presumably it
            # wraps a shared connection so the final commit below covers
            # these statements; confirm against the BrickSQL implementation
            sql = BrickSQL()
            # Ensure part/color combination exists in rebrickable_parts (same as lot creation)
            try:
                # Check if part exists
                result = sql.fetchone('rebrickable_parts/check_exists', parameters={'part': part_num, 'color_id': color_id})
                exists = result[0] > 0
                if not exists:
                    # Insert part data
                    part_name = cart_item.get('part_name', '')
                    color_name = cart_item.get('color_name', '')
                    image_url = color_info.get('part_img_url', '')
                    # Extract image_id from element_ids or URL
                    element_ids = color_info.get('elements', [])
                    if element_ids and len(element_ids) > 0:
                        # Prefer the first element id as the image identifier
                        image_id = str(element_ids[0])
                    elif image_url:
                        # Fall back to the image filename (without extension)
                        image_id, _ = os.path.splitext(os.path.basename(urlparse(image_url).path))
                    else:
                        image_id = None
                    # Use full_color_info for RGB and transparency data (same as single-part add)
                    sql.execute('rebrickable_parts/insert_part_color', parameters={
                        'part': part_num,
                        'name': part_name,
                        'color_id': color_id,
                        'color_name': color_name,
                        'color_rgb': full_color_info.get('rgb') if full_color_info else '',
                        'color_transparent': full_color_info.get('is_trans') if full_color_info else False,
                        'image': image_url,
                        'image_id': image_id,
                        'url': current_app.config['REBRICKABLE_LINK_PART_PATTERN'].format(part=part_num, color=color_id),
                        'bricklink_color_id': full_color_info.get('bricklink_color_id') if full_color_info else None,
                        'bricklink_color_name': full_color_info.get('bricklink_color_name') if full_color_info else None
                    })
            except Exception as e:
                # Best-effort: a failed rebrickable_parts upsert is logged
                # but does not abort the bulk insert
                logger.warning('Could not ensure part data for {part_num}/{color_id}: {error}'.format(
                    part_num=part_num,
                    color_id=color_id,
                    error=e
                ))
            # Insert individual part
            sql.execute(
                'individual_part/insert',
                parameters={
                    'id': part_uuid,
                    'part': part_num,
                    'color': color_id,
                    'quantity': quantity,
                    'lot_id': None,  # No lot - this is bulk individual parts mode
                    'storage': storage_id,
                    'purchase_location': purchase_location_id,
                    'purchase_date': purchase_date,
                    'purchase_price': purchase_price,
                    'description': None,
                    'missing': 0,
                    'damaged': 0,
                    'checked': False
                }
            )
            # Add owners
            for owner_id in owners:
                owner = BrickSetOwnerList.get(owner_id)
                if owner:
                    sql.execute(
                        'individual_part/metadata/owner/insert',
                        parameters={
                            'part_id': part_uuid,
                            'owner_id': owner_id
                        }
                    )
            # Add tags
            for tag_id in tags:
                tag = BrickSetTagList.get(tag_id)
                if tag:
                    sql.execute(
                        'individual_part/metadata/tag/insert',
                        parameters={
                            'part_id': part_uuid,
                            'tag_id': tag_id
                        }
                    )
            # Download part image if available
            image_url = color_info.get('part_img_url', '')
            if image_url:
                try:
                    self.download_image(image_url)
                except Exception as e:
                    # Don't fail the whole operation if image download fails
                    logger.warning('Could not download image for part {part_num} color {color_id}: {error}'.format(
                        part_num=part_num,
                        color_id=color_id,
                        error=e
                    ))
            parts_added += 1
        # Commit all changes
        # NOTE(review): committing through a brand-new BrickSQL() instance —
        # only correct if all instances share one underlying connection
        sql = BrickSQL()
        sql.commit()
        socket.auto_progress(
            message=f'Successfully added {parts_added} individual parts',
            increment_total=True
        )
        # Generate link to individual parts list
        from flask import url_for
        parts_url = url_for('individual_part.list')
        # Send completion with message and link
        socket.complete(
            message='Successfully added {count} individual parts. <a href="{url}">View individual parts</a>'.format(
                count=parts_added,
                url=parts_url
            ),
            parts_added=parts_added
        )
        return True
    except ErrorException as error:
        # Expected validation errors: report the message as-is
        socket.fail(message=str(error))
        return False
    except Exception as error:
        # Unexpected errors: log the traceback and report a generic failure
        logger.error('Failed to create bulk individual parts: {error}'.format(error=error))
        logger.error(traceback.format_exc())
        socket.fail(message='Failed to add individual parts: {error}'.format(error=str(error)))
        return False
# Update a field
def update_field(self, field: str, value: Any, /) -> Self:
    """Set one attribute on this part and persist it.

    Updates the in-memory record first, then runs the field-specific
    update query. Returns self for chaining.
    """
    setattr(self.fields, field, value)
    # Each field has its own templated update query
    BrickSQL().execute_and_commit(
        'individual_part/update/field',
        parameters={
            'id': self.fields.id,
            'value': value
        },
        field=field
    )
    return self
# Update problem count (missing/damaged)
def update_problem(self, problem: str, data: dict[str, Any], /) -> int:
    """
    Update one problem counter ('missing' or 'damaged') for this part.

    Accepts the payload under either the 'value' key (changer.js) or the
    'amount' key; an empty string counts as 0.

    Returns the validated amount.
    Raises ErrorException when the value is not an integer or is negative.
    """
    # Handle both 'value' key and 'amount' key
    amount: str | int = data.get('value', data.get('amount', ''))  # type: ignore
    # We need a non-negative integer
    try:
        if amount == '':
            amount = 0
        amount = int(amount)
    except Exception:
        raise ErrorException(f'"{amount}" is not a valid integer')
    # Fix: negatives used to be silently clamped to 0 inside the try
    # block, which made this explicit validation unreachable dead code.
    # Reject them as the error message always intended.
    if amount < 0:
        raise ErrorException('Cannot set a negative amount')
    setattr(self.fields, problem, amount)
    # One dedicated update query per problem kind (missing/damaged)
    BrickSQL().execute_and_commit(
        f'individual_part/update/{problem}',
        parameters={
            'id': self.fields.id,
            problem: amount
        }
    )
    return amount
# Update checked status
def update_checked(self, data: dict[str, Any], /) -> bool:
    """Set the checked flag for this part and persist it.

    Accepts the flag under either the 'checked' key or the changer.js
    'value' key; a missing/empty payload means unchecked. Returns the
    resulting boolean state.
    """
    # Handle both direct 'checked' key and changer.js 'value' key format
    raw = data.get('checked', data.get('value', False)) if data else False
    is_checked = bool(raw)
    # Stored as an integer 0/1 flag
    self.fields.checked = int(is_checked)
    BrickSQL().execute_and_commit(
        'individual_part/update/checked',
        parameters={
            'id': self.fields.id,
            'checked': self.fields.checked
        }
    )
    return is_checked
-100
View File
@@ -1,100 +0,0 @@
import logging
from typing import Self, TYPE_CHECKING
from .record_list import BrickRecordList
from .individual_part import IndividualPart
if TYPE_CHECKING:
from .set_storage import BrickSetStorage
logger = logging.getLogger(__name__)
# List of individual parts
class IndividualPartList(BrickRecordList):
    """Queryable collection of IndividualPart records.

    Each helper sets the filter fields its named SQL query expects,
    delegates to list() to load the matching rows, and returns self
    for chaining.
    """

    # Named queries, resolved by the SQL layer
    list_query: str = 'individual_part/list/all'
    by_part_query: str = 'individual_part/list/by_part'
    by_color_query: str = 'individual_part/list/by_color'
    by_part_and_color_query: str = 'individual_part/list/by_part_and_color'
    by_storage_query: str = 'individual_part/list/by_storage'
    using_storage_query: str = 'individual_part/list/using_storage'
    using_purchase_location_query: str = 'individual_part/list/using_purchase_location'
    without_storage_query: str = 'individual_part/list/without_storage'
    problem_query: str = 'individual_part/list/problem'

    # Get all individual parts
    def all(self, /) -> Self:
        self.list(override_query=self.list_query)
        return self

    # Get individual parts by part number
    def by_part(self, part: str, /) -> Self:
        self.fields.part = part
        self.list(override_query=self.by_part_query)
        return self

    # Get individual parts by color
    def by_color(self, color_id: int, /) -> Self:
        self.fields.color = color_id
        self.list(override_query=self.by_color_query)
        return self

    # Get individual parts by part number and color
    def by_part_and_color(self, part: str, color_id: int, /) -> Self:
        self.fields.part = part
        self.fields.color = color_id
        self.list(override_query=self.by_part_and_color_query)
        return self

    # Get individual parts by storage location
    def by_storage(self, storage: 'BrickSetStorage', /) -> Self:
        self.fields.storage = storage.fields.id
        self.list(override_query=self.by_storage_query)
        return self

    # Get individual parts using a specific storage location
    def using_storage(self, storage: 'BrickSetStorage', /) -> Self:
        self.fields.storage = storage.fields.id
        self.list(override_query=self.using_storage_query)
        return self

    # Get individual parts using a specific purchase location
    def using_purchase_location(self, purchase_location: 'BrickSetPurchaseLocation', /) -> Self:
        self.fields.purchase_location = purchase_location.fields.id
        self.list(override_query=self.using_purchase_location_query)
        return self

    # Get individual parts without storage
    def without_storage(self, /) -> Self:
        self.list(override_query=self.without_storage_query)
        return self

    # Get individual parts with problems (missing or damaged)
    def with_problems(self, /) -> Self:
        self.list(override_query=self.problem_query)
        return self

    # Base individual part list
    def list(
        self,
        /,
        *,
        override_query: str | None = None,
        order: str | None = None,
        limit: int | None = None,
        **context,
    ) -> None:
        """Run the select and wrap every returned row in an IndividualPart."""
        rows = super().select(
            override_query=override_query,
            order=order,
            limit=limit,
            **context
        )
        self.records.extend(IndividualPart(record=row) for row in rows)

    # Set the record class
    def set_record_class(self, /) -> None:
        self.record_class = IndividualPart
-302
View File
@@ -1,302 +0,0 @@
import logging
import os
import traceback
from datetime import datetime
from typing import Any, Self, TYPE_CHECKING
from urllib.parse import urlparse
from uuid import uuid4
from flask import (
current_app,
url_for,
)
from .exceptions import NotFoundException, DatabaseException, ErrorException
from .individual_part import IndividualPart
from .record import BrickRecord, format_timestamp
from .set_owner_list import BrickSetOwnerList
from .set_purchase_location_list import BrickSetPurchaseLocationList
from .set_storage_list import BrickSetStorageList
from .set_tag_list import BrickSetTagList
from .sql import BrickSQL
if TYPE_CHECKING:
from .socket import BrickSocket
logger = logging.getLogger(__name__)
# Individual part lot (collection/batch of individual parts added together)
class IndividualPartLot(BrickRecord):
    """A lot groups individual parts that were added together.

    Parts in a lot inherit the lot's storage, purchase and metadata
    information instead of carrying their own.
    """

    # Queries
    select_query: str = 'individual_part_lot/select/by_id'
    insert_query: str = 'individual_part_lot/insert'

    def __init__(
        self,
        /,
        *,
        record: Any | None = None
    ):
        super().__init__()
        # Ingest the record if it has one
        if record is not None:
            self.ingest(record)

    # Select a specific lot by UUID
    def select_by_id(self, id: str, /) -> Self:
        """Load one lot by its UUID, raising NotFoundException when absent."""
        # Local imports to avoid circular imports at module load
        from .set_owner_list import BrickSetOwnerList
        from .set_tag_list import BrickSetTagList
        self.fields.id = id
        if not self.select(
            override_query=self.select_query,
            owners=BrickSetOwnerList.as_columns(),
            tags=BrickSetTagList.as_columns(),
            # Note: Part lots don't have statuses (by design)
            # Statuses are meant for tracking set completion/verification, which doesn't apply
            # to loose part collections. Individual parts within lots can still be marked as
            # missing/damaged/checked through the parts inventory system.
        ):
            raise NotFoundException(
                'Individual part lot with id "{id}" not found'.format(id=id)
            )
        return self

    # Delete a lot and all its parts
    def delete(self, /) -> None:
        BrickSQL().executescript(
            'individual_part_lot/delete',
            id=self.fields.id
        )

    # Get the URL for this lot
    def url(self, /) -> str:
        return url_for('individual_part.lot_details', lot_id=self.fields.id)

    # String representation for debugging
    def __repr__(self, /) -> str:
        name = getattr(self.fields, 'name', 'Unnamed') or 'Unnamed'
        lot_id = getattr(self.fields, 'id', 'unknown')
        # Try to get part_count if available (from optimized query)
        part_count = getattr(self.fields, 'part_count', '?')
        return f'<IndividualPartLot "{name}" ({part_count} parts) id:{lot_id[:8]}...>'

    # Format created date
    def created_date_formatted(self, /) -> str:
        return format_timestamp(self.fields.created_date)

    # Format purchase date
    def purchase_date_formatted(self, /) -> str:
        return format_timestamp(self.fields.purchase_date)

    # Format purchase price
    def purchase_price(self, /) -> str:
        # NOTE(review): redundant local import — current_app is already
        # imported at module level
        from flask import current_app
        if self.fields.purchase_price is not None:
            return '{price}{currency}'.format(
                price=self.fields.purchase_price,
                currency=current_app.config['PURCHASE_CURRENCY']
            )
        else:
            return ''

    # Get all parts in this lot
    def parts(self, /) -> list['IndividualPart']:
        sql = BrickSQL()
        parts_data = sql.fetchall('individual_part_lot/list/parts', parameters={'lot_id': self.fields.id})
        # Convert to list of IndividualPart objects using ingest()
        return [IndividualPart(record=record) for record in parts_data]

    # Get total quantity of all parts in this lot
    def total_quantity(self, /) -> int:
        parts = self.parts()
        return sum(part.fields.quantity for part in parts)

    # Create a new lot with parts from cart
    def create(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
        """
        Create a new individual part lot with multiple parts.

        Inserts and commits the lot record first (so the parts' foreign
        keys can reference it), applies owners/tags, then inserts one
        individual part row per cart entry with this lot's id. Progress
        is streamed to the client through the socket; returns True on
        success, False on failure (socket.fail already sent).

        Expected data format:
        {
            'cart': [
                {
                    'part': '3001',
                    'part_name': 'Brick 2 x 4',
                    'color_id': 1,
                    'color_name': 'White',
                    'quantity': 10,
                    'color_info': {...}
                },
                ...
            ],
            'name': 'Optional lot name',
            'description': 'Optional lot description',
            'storage': 'storage_id',
            'purchase_location': 'purchase_location_id',
            'purchase_date': timestamp,
            'purchase_price': 0.0,
            'owners': ['owner_id1', ...],
            'tags': ['tag_id1', ...]
        }
        """
        try:
            # Validate cart data
            cart = data.get('cart', [])
            if not cart or not isinstance(cart, list):
                raise ErrorException('Cart is empty or invalid')
            socket.auto_progress(
                message=f'Creating lot with {len(cart)} parts',
                increment_total=True
            )
            # Generate UUID for the lot
            lot_id = str(uuid4())
            self.fields.id = lot_id
            # Set lot metadata
            self.fields.name = data.get('name', None)
            self.fields.description = data.get('description', None)
            self.fields.created_date = datetime.now().timestamp()
            # Get storage
            storage = BrickSetStorageList.get(
                data.get('storage', ''),
                allow_none=True
            )
            self.fields.storage = storage.fields.id if storage else None
            # Get purchase location
            purchase_location = BrickSetPurchaseLocationList.get(
                data.get('purchase_location', ''),
                allow_none=True
            )
            self.fields.purchase_location = purchase_location.fields.id if purchase_location else None
            # Set purchase info
            self.fields.purchase_date = data.get('purchase_date', None)
            self.fields.purchase_price = data.get('purchase_price', None)
            # Insert the lot record
            socket.auto_progress(
                message='Inserting lot into database',
                increment_total=True
            )
            self.insert(commit=False)
            # Commit the lot so parts can reference it
            sql = BrickSQL()
            sql.commit()
            # Save owners using the metadata update methods
            owners: list[str] = list(data.get('owners', []))
            for owner_id in owners:
                owner = BrickSetOwnerList.get(owner_id)
                if owner:
                    owner.update_individual_part_lot_state(self, state=True, commit=False)
            # Save tags using the metadata update methods
            tags: list[str] = list(data.get('tags', []))
            for tag_id in tags:
                tag = BrickSetTagList.get(tag_id)
                if tag:
                    tag.update_individual_part_lot_state(self, state=True, commit=False)
            # Add all parts from cart
            socket.auto_progress(
                message=f'Adding {len(cart)} parts to lot',
                increment_total=True
            )
            for idx, cart_item in enumerate(cart):
                part_num = cart_item.get('part')
                color_id = cart_item.get('color_id')
                quantity = cart_item.get('quantity', 1)
                color_info = cart_item.get('color_info', {})
                socket.auto_progress(
                    message=f'Adding part {idx + 1}/{len(cart)}: {part_num} in {cart_item.get("color_name", "unknown color")}',
                    increment_total=True
                )
                # Create individual part with lot_id
                part_uuid = str(uuid4())
                # NOTE(review): a fresh BrickSQL() per iteration — presumably
                # instances share one connection; confirm against BrickSQL
                sql = BrickSQL()
                # Ensure color and part/color combination exist in rebrickable tables
                IndividualPart.get_or_fetch_color(color_id)
                part_name = cart_item.get('part_name', '')
                color_name = cart_item.get('color_name', '')
                image_url = color_info.get('part_img_url', '')
                # Extract image_id from element_ids or URL
                element_ids = color_info.get('elements', [])
                if element_ids and len(element_ids) > 0:
                    # Prefer the first element id as the image identifier
                    image_id = str(element_ids[0])
                elif image_url:
                    # Fall back to the image filename (without extension)
                    image_id, _ = os.path.splitext(os.path.basename(urlparse(image_url).path))
                else:
                    image_id = None
                sql.execute('rebrickable_parts/insert_part_color', parameters={
                    'part': part_num,
                    'name': part_name,
                    'color_id': color_id,
                    'color_name': color_name,
                    'color_rgb': color_info.get('rgb', ''),
                    'color_transparent': color_info.get('is_trans', False),
                    'image': image_url,
                    'image_id': image_id,
                    'url': current_app.config['REBRICKABLE_LINK_PART_PATTERN'].format(part=part_num, color=color_id),
                    'bricklink_color_id': color_info.get('bricklink_color_id', None),
                    'bricklink_color_name': color_info.get('bricklink_color_name', None)
                })
                # Commit so the foreign key constraint can be satisfied
                sql.commit()
                # Now insert the part with lot_id (NO individual metadata - inherited from lot)
                sql.execute('individual_part/insert_with_lot', parameters={
                    'id': part_uuid,
                    'part': part_num,
                    'color': color_id,
                    'quantity': quantity,
                    'lot_id': lot_id
                })
            # Commit all changes
            socket.auto_progress(
                message='Committing changes to database',
                increment_total=True
            )
            sql.commit()
            socket.auto_progress(
                message=f'Lot created successfully with {len(cart)} parts',
                increment_total=True
            )
            # Complete with success message and lot URL
            lot_url = self.url()
            socket.complete(
                message=f'Successfully created lot with {len(cart)} parts. <a href="{lot_url}">View lot</a>',
                data={
                    'lot_id': lot_id,
                    'lot_url': lot_url
                }
            )
            return True
        except ErrorException as e:
            # Expected validation errors: report the message as-is
            socket.fail(message=str(e))
            logger.error('Error creating lot: {error}'.format(error=e))
            return False
        except Exception as e:
            # Unexpected errors: log the traceback and report a generic failure
            socket.fail(message='Unexpected error creating lot: {error}'.format(error=str(e)))
            logger.error('Unexpected error creating lot: {error}'.format(error=e))
            logger.error(traceback.format_exc())
            return False
-86
View File
@@ -1,86 +0,0 @@
import logging
from typing import Self, TYPE_CHECKING
from .record_list import BrickRecordList
from .individual_part_lot import IndividualPartLot
if TYPE_CHECKING:
from .set_storage import BrickSetStorage
logger = logging.getLogger(__name__)
# List of individual part lots
class IndividualPartLotList(BrickRecordList):
    """Queryable collection of IndividualPartLot records.

    Each helper sets the filter fields its named SQL query expects,
    delegates to list() to load the matching rows, and returns self
    for chaining.
    """

    # Named queries, resolved by the SQL layer
    list_query: str = 'individual_part_lot/list/all'
    by_part_and_color_query: str = 'individual_part_lot/list/by_part_and_color'
    by_storage_query: str = 'individual_part_lot/list/by_storage'
    using_storage_query: str = 'individual_part_lot/list/using_storage'
    using_purchase_location_query: str = 'individual_part_lot/list/using_purchase_location'
    without_storage_query: str = 'individual_part_lot/list/without_storage'
    problem_query: str = 'individual_part_lot/list/problem'

    # Get all individual part lots
    def all(self, /) -> Self:
        self.list(override_query=self.list_query)
        return self

    # Base individual part lot list
    def list(
        self,
        /,
        *,
        override_query: str | None = None,
        order: str | None = None,
        limit: int | None = None,
        **context,
    ) -> None:
        """Run the select and wrap every returned row in an IndividualPartLot."""
        rows = super().select(
            override_query=override_query,
            order=order,
            limit=limit,
            **context
        )
        self.records.extend(IndividualPartLot(record=row) for row in rows)

    # Set the record class
    def set_record_class(self, /) -> None:
        self.record_class = IndividualPartLot

    # Get individual part lots containing a specific part and color
    def by_part_and_color(self, part: str, color_id: int, /) -> Self:
        self.fields.part = part
        self.fields.color = color_id
        self.list(override_query='individual_part_lot/list/by_part_and_color')
        return self

    # Get individual part lots by storage location
    def by_storage(self, storage: 'BrickSetStorage', /) -> Self:
        self.fields.storage = storage.fields.id
        self.list(override_query=self.by_storage_query)
        return self

    # Get individual part lots using a specific storage location
    def using_storage(self, storage: 'BrickSetStorage', /) -> Self:
        self.fields.storage = storage.fields.id
        self.list(override_query=self.using_storage_query)
        return self

    # Get individual part lots using a specific purchase location
    def using_purchase_location(self, purchase_location: 'BrickSetPurchaseLocation', /) -> Self:
        self.fields.purchase_location = purchase_location.fields.id
        self.list(override_query=self.using_purchase_location_query)
        return self

    # Get individual part lots without storage
    def without_storage(self, /) -> Self:
        self.list(override_query=self.without_storage_query)
        return self

    # Get individual part lots with problems (containing parts with missing or damaged items)
    def with_problems(self, /) -> Self:
        self.list(override_query=self.problem_query)
        return self
+21 -68
View File
@@ -13,6 +13,7 @@ import requests
from werkzeug.datastructures import FileStorage
from werkzeug.utils import secure_filename
import re
import cloudscraper
from .exceptions import ErrorException, DownloadException
if TYPE_CHECKING:
@@ -105,34 +106,12 @@ class BrickInstructions(object):
message=f'File {self.filename} already exists, skipped - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
)
# Use plain requests instead of cloudscraper
session = requests.Session()
session.headers.update({
'User-Agent': current_app.config['REBRICKABLE_USER_AGENT'],
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.5',
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'same-origin',
'Cache-Control': 'max-age=0'
# Fetch PDF via cloudscraper (to bypass Cloudflare)
scraper = cloudscraper.create_scraper()
scraper.headers.update({
"User-Agent": current_app.config['REBRICKABLE_USER_AGENT']
})
# Visit the set's instructions listing page first to establish session cookies
set_number = None
if self.rebrickable:
set_number = self.rebrickable.fields.set
elif self.set:
set_number = self.set
if set_number:
instructions_page = f"https://rebrickable.com/instructions/{set_number}/"
session.get(instructions_page)
session.headers.update({"Referer": instructions_page})
resp = session.get(path, stream=True, allow_redirects=True)
resp = scraper.get(path, stream=True)
if not resp.ok:
raise DownloadException(f"Failed to download: HTTP {resp.status_code}")
@@ -193,16 +172,11 @@ class BrickInstructions(object):
if filename is None:
filename = self.filename
folder = current_app.config['INSTRUCTIONS_FOLDER']
# If folder is absolute, use it directly
# Otherwise, make it relative to app root (not static folder)
if os.path.isabs(folder):
base_path = folder
else:
base_path = os.path.join(current_app.root_path, folder)
return os.path.join(base_path, filename)
return os.path.join(
current_app.static_folder, # type: ignore
current_app.config['INSTRUCTIONS_FOLDER'],
filename
)
# Rename an instructions file
def rename(self, filename: str, /) -> None:
@@ -243,16 +217,10 @@ class BrickInstructions(object):
folder: str = current_app.config['INSTRUCTIONS_FOLDER']
# Determine which route to use based on folder path
# If folder contains 'data' (new structure), use data route
# Otherwise use static route (legacy)
if 'data' in folder:
return url_for('data.serve_data_file', folder='instructions', filename=self.filename)
else:
# Legacy: folder is relative to static/
folder_clean = folder.removeprefix('static/')
path = os.path.join(folder_clean, self.filename)
return url_for('static', filename=path)
# Compute the path
path = os.path.join(folder, self.filename)
return url_for('static', filename=path)
# Return the icon depending on the extension
def icon(self, /) -> str:
@@ -269,33 +237,20 @@ class BrickInstructions(object):
@staticmethod
def find_instructions(set: str, /) -> list[Tuple[str, str]]:
"""
Scrape Rebrickable's HTML and return a list of
Scrape Rebrickable's HTML and return a list of
(filename_slug, download_url). Duplicate slugs get _1, _2, …
"""
page_url = f"https://rebrickable.com/instructions/{set}/"
logger.debug(f"[find_instructions] fetching HTML from {page_url!r}")
# Use plain requests instead of cloudscraper
session = requests.Session()
session.headers.update({
'User-Agent': current_app.config['REBRICKABLE_USER_AGENT'],
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.5',
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'none',
'Cache-Control': 'max-age=0'
})
resp = session.get(page_url)
# Solve Cloudflare's challenge
scraper = cloudscraper.create_scraper()
scraper.headers.update({'User-Agent': current_app.config['REBRICKABLE_USER_AGENT']})
resp = scraper.get(page_url)
if not resp.ok:
raise ErrorException(f'Failed to load instructions page for {set}. HTTP {resp.status_code}')
soup = BeautifulSoup(resp.content, 'html.parser')
# Match download links with or without query parameters (e.g., ?cfe=timestamp&cfk=key)
link_re = re.compile(r'^/instructions/\d+/.+/download/')
raw: list[tuple[str, str]] = []
@@ -308,10 +263,8 @@ class BrickInstructions(object):
alt_text = img['alt'].removeprefix('LEGO Building Instructions for ') # type: ignore
slug = re.sub(r'[^A-Za-z0-9]+', '-', alt_text).strip('-')
# Build the absolute download URL - this preserves query parameters
# BeautifulSoup's a['href'] includes the full href with ?cfe=...&cfk=... params
# Build the absolute download URL
download_url = urljoin('https://rebrickable.com', a['href']) # type: ignore
logger.debug(f"[find_instructions] Found download link: {download_url}")
raw.append((slug, download_url))
if not raw:
+5 -8
View File
@@ -36,14 +36,11 @@ class BrickInstructionsList(object):
# Try to list the files in the instruction folder
try:
folder_config: str = current_app.config['INSTRUCTIONS_FOLDER']
# If folder is absolute, use it directly
# Otherwise, make it relative to app root (not static folder)
if os.path.isabs(folder_config):
folder = folder_config
else:
folder = os.path.join(current_app.root_path, folder_config)
# Make a folder relative to static
folder: str = os.path.join(
current_app.static_folder, # type: ignore
current_app.config['INSTRUCTIONS_FOLDER'],
)
for file in os.scandir(folder):
instruction = BrickInstructions(file)
+84 -189
View File
@@ -10,7 +10,6 @@ from .record import BrickRecord
from .sql import BrickSQL
if TYPE_CHECKING:
from .individual_minifigure import IndividualMinifigure
from .individual_part import IndividualPart
from .set import BrickSet
logger = logging.getLogger(__name__)
@@ -20,16 +19,20 @@ logger = logging.getLogger(__name__)
class BrickMetadata(BrickRecord):
kind: str
# Set state endpoint
set_state_endpoint: str
# Endpoints (optional, not all metadata types use all of these)
set_state_endpoint: str = ''
individual_minifigure_state_endpoint: str = ''
individual_minifigure_value_endpoint: str = ''
# Queries
delete_query: str
insert_query: str
select_query: str
update_field_query: str
update_set_state_query: str
update_set_value_query: str
update_set_state_query: str = ''
update_set_value_query: str = ''
update_individual_minifigure_state_query: str = ''
update_individual_minifigure_value_query: str = ''
def __init__(
self,
@@ -108,24 +111,19 @@ class BrickMetadata(BrickRecord):
metadata_id=self.fields.id
)
# URL to change the selected state of this metadata item for an individual part
def url_for_individual_part_state(self, part_id: str, /) -> str:
# Replace 'set' with 'individual_part' in the endpoint name
endpoint = self.set_state_endpoint.replace('set.', 'individual_part.')
# URL to change the selected state of this metadata item for an individual minifigure
def url_for_individual_minifigure_state(self, id: str, /) -> str:
return url_for(
endpoint,
id=part_id,
self.individual_minifigure_state_endpoint,
id=id,
metadata_id=self.fields.id
)
# URL to change the selected state of this metadata item for an individual minifigure
def url_for_individual_minifigure_state(self, minifigure_id: str, /) -> str:
# Replace 'set' with 'individual_minifigure' in the endpoint name
endpoint = self.set_state_endpoint.replace('set.', 'individual_minifigure.')
# URL to change the value for an individual minifigure
def url_for_individual_minifigure_value(self, id: str, /) -> str:
return url_for(
endpoint,
id=minifigure_id,
metadata_id=self.fields.id
self.individual_minifigure_value_endpoint,
id=id
)
# Select a specific metadata (with an id)
@@ -204,8 +202,7 @@ class BrickMetadata(BrickRecord):
/,
*,
json: Any | None = None,
state: Any | None = None,
commit: bool = True
state: Any | None = None
) -> Any:
if state is None and json is not None:
state = json.get('value', False)
@@ -214,24 +211,16 @@ class BrickMetadata(BrickRecord):
parameters['set_id'] = brickset.fields.id
parameters['state'] = state
if commit:
rows, _ = BrickSQL().execute_and_commit(
self.update_set_state_query,
parameters=parameters,
name=self.as_column(),
)
else:
rows, _ = BrickSQL().execute(
self.update_set_state_query,
parameters=parameters,
defer=True,
name=self.as_column(),
)
rows, _ = BrickSQL().execute_and_commit(
self.update_set_state_query,
parameters=parameters,
name=self.as_column(),
)
# When deferred, rows will be -1, so skip the check
if commit and rows != 1:
raise DatabaseException('Could not update the {kind} state for set {set} ({id})'.format(
if rows != 1:
raise DatabaseException('Could not update the {kind} "{name}" state for set {set} ({id})'.format( # noqa: E501
kind=self.kind,
name=self.fields.name,
set=brickset.fields.set,
id=brickset.fields.id,
))
@@ -247,6 +236,65 @@ class BrickMetadata(BrickRecord):
return state
# Check if this metadata has a specific individual minifigure
def has_individual_minifigure(
self,
individual_minifigure: 'IndividualMinifigure',
/,
) -> bool:
"""Check if this owner/tag/status is assigned to a individual minifigure"""
# Determine the table name based on metadata type
table_name = f'bricktracker_individual_minifigure_{self.kind}s'
column_name = f'{self.kind}_{self.fields.id}'
# Query to check if the relationship exists using raw SQL
sql = BrickSQL()
query = f'SELECT COUNT(*) as count FROM "{table_name}" WHERE "id" = ? AND "{column_name}" = 1'
result = sql.cursor.execute(query, (individual_minifigure.fields.id,)).fetchone()
return result and result['count'] > 0
# Update the selected state of this metadata item for a individual minifigure
def update_individual_minifigure_state(
self,
individual_minifigure: 'IndividualMinifigure',
/,
*,
json: Any | None = None,
state: Any | None = None
) -> Any:
if state is None and json is not None:
state = json.get('value', False)
parameters = self.sql_parameters()
parameters['id'] = individual_minifigure.fields.id
parameters['state'] = state
rows, _ = BrickSQL().execute_and_commit(
self.update_individual_minifigure_state_query,
parameters=parameters,
name=self.as_column(),
)
if rows != 1:
raise DatabaseException('Could not update the {kind} "{name}" state for individual minifigure {figure} ({id})'.format(
kind=self.kind,
name=self.fields.name,
figure=individual_minifigure.fields.figure,
id=individual_minifigure.fields.id,
))
# Info
logger.info('{kind} "{name}" state changed to "{state}" for individual minifigure {figure} ({id})'.format(
kind=self.kind,
name=self.fields.name,
state=state,
figure=individual_minifigure.fields.figure,
id=individual_minifigure.fields.id,
))
return state
# Update the selected value of this metadata item for a set
def update_set_value(
self,
@@ -292,156 +340,3 @@ class BrickMetadata(BrickRecord):
))
return value
# Update the selected state of this metadata item for an individual part
def update_individual_part_state(
self,
individual_part: 'IndividualPart',
/,
*,
json: Any | None = None,
state: Any | None = None,
commit: bool = True
) -> Any:
if state is None and json is not None:
state = json.get('value', False)
parameters = self.sql_parameters()
parameters['set_id'] = individual_part.fields.id # set_id parameter accepts any entity id
parameters['state'] = state
# Use the same set query (bricktracker_set_owners/tags/statuses tables accept any entity id)
query_name = self.update_set_state_query
if commit:
rows, _ = BrickSQL().execute_and_commit(
query_name,
parameters=parameters,
name=self.as_column(),
)
else:
rows, _ = BrickSQL().execute(
query_name,
parameters=parameters,
defer=True,
name=self.as_column(),
)
# When deferred, rows will be -1, so skip the check
if commit and rows != 1:
raise DatabaseException('Could not update the {kind} state for individual part {part_id}'.format(
kind=self.kind,
part_id=individual_part.fields.id,
))
# Info
logger.info('{kind} "{name}" state changed to "{state}" for individual part {part_id}'.format(
kind=self.kind,
name=self.fields.name,
state=state,
part_id=individual_part.fields.id,
))
return state
# Update the selected state of this metadata item for an individual minifigure
def update_individual_minifigure_state(
self,
individual_minifigure: 'IndividualMinifigure',
/,
*,
json: Any | None = None,
state: Any | None = None,
commit: bool = True
) -> Any:
if state is None and json is not None:
state = json.get('value', False)
parameters = self.sql_parameters()
parameters['set_id'] = individual_minifigure.fields.id # set_id parameter accepts any entity id
parameters['state'] = state
# Use the same set query (bricktracker_set_owners/tags/statuses tables accept any entity id)
query_name = self.update_set_state_query
if commit:
rows, _ = BrickSQL().execute_and_commit(
query_name,
parameters=parameters,
name=self.as_column(),
)
else:
rows, _ = BrickSQL().execute(
query_name,
parameters=parameters,
defer=True,
name=self.as_column(),
)
# When deferred, rows will be -1, so skip the check
if commit and rows != 1:
raise DatabaseException('Could not update the {kind} state for individual minifigure {minifigure_id}'.format(
kind=self.kind,
minifigure_id=individual_minifigure.fields.id,
))
# Info
logger.info('{kind} "{name}" state changed to "{state}" for individual minifigure {minifigure_id}'.format(
kind=self.kind,
name=self.fields.name,
state=state,
minifigure_id=individual_minifigure.fields.id,
))
return state
# Update the selected state of this metadata item for an individual part lot
def update_individual_part_lot_state(
self,
individual_part_lot: 'IndividualPartLot',
/,
*,
json: Any | None = None,
state: Any | None = None,
commit: bool = True
) -> Any:
if state is None and json is not None:
state = json.get('value', False)
parameters = self.sql_parameters()
parameters['set_id'] = individual_part_lot.fields.id # set_id parameter accepts any entity id
parameters['state'] = state
# Use the same set query (bricktracker_set_owners/tags tables accept any entity id)
query_name = self.update_set_state_query
if commit:
rows, _ = BrickSQL().execute_and_commit(
query_name,
parameters=parameters,
name=self.as_column(),
)
else:
rows, _ = BrickSQL().execute(
query_name,
parameters=parameters,
defer=True,
name=self.as_column(),
)
# When deferred, rows will be -1, so skip the check
if commit and rows != 1:
raise DatabaseException('Could not update the {kind} state for individual part lot {lot_id}'.format(
kind=self.kind,
lot_id=individual_part_lot.fields.id,
))
# Info
logger.info('{kind} "{name}" state changed to "{state}" for individual part lot {lot_id}'.format(
kind=self.kind,
name=self.fields.name,
state=state,
lot_id=individual_part_lot.fields.id,
))
return state
+12 -20
View File
@@ -39,9 +39,10 @@ class BrickMetadataList(BrickRecordList[T]):
# Queries
select_query: str
# Set endpoints
set_state_endpoint: str
set_value_endpoint: str
# List-specific endpoints (for operations on the list itself)
set_state_endpoint: str = ''
set_value_endpoint: str = ''
individual_minifigure_value_endpoint: str = ''
def __init__(
self,
@@ -99,12 +100,15 @@ class BrickMetadataList(BrickRecordList[T]):
# Return the items as columns for a select
@classmethod
def as_columns(cls, /, **kwargs) -> str:
def as_columns(cls, /, table: str | None = None, **kwargs) -> str:
new = cls.new()
# Use provided table name or default to class table
table_name = table if table is not None else cls.table
return ', '.join([
'"{table}"."{column}"'.format(
table=cls.table,
table=table_name,
column=record.as_column(),
)
for record
@@ -185,22 +189,10 @@ class BrickMetadataList(BrickRecordList[T]):
id=id,
)
# URL to change the selected value of this metadata item for an individual part
@classmethod
def url_for_individual_part_value(cls, part_id: str, /) -> str:
# Replace 'set' with 'individual_part' in the endpoint name
endpoint = cls.set_value_endpoint.replace('set.', 'individual_part.')
return url_for(
endpoint,
id=part_id,
)
# URL to change the selected value of this metadata item for an individual minifigure
@classmethod
def url_for_individual_minifigure_value(cls, minifigure_id: str, /) -> str:
# Replace 'set' with 'individual_minifigure' in the endpoint name
endpoint = cls.set_value_endpoint.replace('set.', 'individual_minifigure.')
def url_for_individual_minifigure_value(cls, id: str, /) -> str:
return url_for(
endpoint,
id=minifigure_id,
cls.individual_minifigure_value_endpoint,
id=id,
)
-88
View File
@@ -1,88 +0,0 @@
"""
Migration 0027: Consolidate metadata tables - remove FK constraints from set metadata tables
This migration removes foreign key constraints from bricktracker_set_owners, _tags, and _statuses
so they can accept any entity ID (sets, individual parts, individual minifigures, individual part lots).
Since these tables have dynamically added columns, we need to read the schema and recreate the tables
with all existing columns but without the foreign key constraints.
"""
import logging
from typing import Any, TYPE_CHECKING
if TYPE_CHECKING:
from ..sql import BrickSQL
logger = logging.getLogger(__name__)
def migration_0027(sql: 'BrickSQL') -> dict[str, Any]:
"""
Remove foreign key constraints from set metadata junction tables.
This allows the tables to store metadata for any entity type, not just sets.
"""
tables_to_migrate = [
'bricktracker_set_owners',
'bricktracker_set_tags',
'bricktracker_set_statuses'
]
for table_name in tables_to_migrate:
logger.info('Migrating {table_name} to remove foreign key constraint'.format(
table_name=table_name
))
# Get the current table schema
cursor = sql.cursor.execute(f"PRAGMA table_info({table_name})")
columns = cursor.fetchall()
# Build column definitions for new table (without FK constraint)
column_defs = []
column_names = []
for col in columns:
col_name = col[1]
col_type = col[2]
col_not_null = col[3]
col_default = col[4]
col_pk = col[5]
column_names.append(f'"{col_name}"')
col_def = f'"{col_name}" {col_type}'
if col_pk:
col_def += ' PRIMARY KEY'
if col_not_null and not col_pk:
if col_default is not None:
col_def += f' NOT NULL DEFAULT {col_default}'
else:
col_def += ' NOT NULL'
elif col_default is not None:
col_def += f' DEFAULT {col_default}'
column_defs.append(col_def)
# Create new table without foreign key constraint
new_table_name = f'{table_name}_new'
create_sql = f'CREATE TABLE "{new_table_name}" ({", ".join(column_defs)})'
logger.debug('Creating new table: {sql}'.format(sql=create_sql))
sql.cursor.execute(create_sql)
# Copy all data
column_list = ', '.join(column_names)
copy_sql = f'INSERT INTO "{new_table_name}" ({column_list}) SELECT {column_list} FROM "{table_name}"'
logger.debug('Copying data: {sql}'.format(sql=copy_sql))
sql.cursor.execute(copy_sql)
# Drop old table
sql.cursor.execute(f'DROP TABLE "{table_name}"')
# Rename new table to old name
sql.cursor.execute(f'ALTER TABLE "{new_table_name}" RENAME TO "{table_name}"')
logger.info('Successfully migrated {table_name}'.format(table_name=table_name))
logger.info('Migration 0027 complete - all set metadata tables now accept any entity ID')
return {}
+6 -7
View File
@@ -33,7 +33,11 @@ class BrickMinifigure(RebrickableMinifigure):
)
)
# Load the inventory (needed to count parts for rebrickable record)
if not refresh:
# Insert into database
self.insert(commit=False)
# Load the inventory
if not BrickPartList.download(
socket,
self.brickset,
@@ -42,14 +46,9 @@ class BrickMinifigure(RebrickableMinifigure):
):
return False
# Insert the rebrickable minifigure into database first (parent record)
# This must happen before inserting into bricktracker_minifigures due to FK constraint
# Insert the rebrickable set into database (after counting parts)
self.insert_rebrickable()
if not refresh:
# Insert into bricktracker_minifigures database (child record)
self.insert(commit=False)
except Exception as e:
socket.fail(
message='Error while importing minifigure {figure} from {set}: {error}'.format( # noqa: E501
+8 -68
View File
@@ -20,8 +20,8 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
order: str
# Queries
all_query: str = 'minifigure/list/all_unified'
all_by_owner_query: str = 'minifigure/list/all_by_owner_unified'
all_query: str = 'minifigure/list/all'
all_by_owner_query: str = 'minifigure/list/all_by_owner'
damaged_part_query: str = 'minifigure/list/damaged_part'
last_query: str = 'minifigure/list/last'
missing_part_query: str = 'minifigure/list/missing_part'
@@ -43,31 +43,6 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
return self
# Load all minifigures with problems filter
def all_filtered(self, /, owner_id: str | None = None, problems_filter: str = 'all', theme_id: str = 'all', year: str = 'all', individuals_filter: str = 'all') -> Self:
# Save the owner_id parameter
if owner_id is not None:
self.fields.owner_id = owner_id
context = {}
if problems_filter and problems_filter != 'all':
context['problems_filter'] = problems_filter
if theme_id and theme_id != 'all':
context['theme_id'] = theme_id
if year and year != 'all':
context['year'] = year
if individuals_filter and individuals_filter != 'all':
context['individuals_filter'] = individuals_filter
# Choose query based on whether owner filtering is needed
if owner_id and owner_id != 'all':
query = self.all_by_owner_query
else:
query = self.all_query
self.list(override_query=query, **context)
return self
# Load all minifigures by owner
def all_by_owner(self, owner_id: str | None = None, /) -> Self:
# Save the owner_id parameter
@@ -78,34 +53,10 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
return self
# Load all minifigures by owner with problems filter
def all_by_owner_filtered(self, /, owner_id: str | None = None, problems_filter: str = 'all', theme_id: str = 'all', year: str = 'all', individuals_filter: str = 'all') -> Self:
# Save the owner_id parameter
self.fields.owner_id = owner_id
context = {}
if problems_filter and problems_filter != 'all':
context['problems_filter'] = problems_filter
if theme_id and theme_id != 'all':
context['theme_id'] = theme_id
if year and year != 'all':
context['year'] = year
if individuals_filter and individuals_filter != 'all':
context['individuals_filter'] = individuals_filter
# Load the minifigures from the database
self.list(override_query=self.all_by_owner_query, **context)
return self
# Load minifigures with pagination support
def all_filtered_paginated(
self,
owner_id: str | None = None,
problems_filter: str = 'all',
theme_id: str = 'all',
year: str = 'all',
individuals_filter: str = 'all',
search_query: str | None = None,
page: int = 1,
per_page: int = 50,
@@ -123,26 +74,15 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
if search_query:
filter_context['search_query'] = search_query
if problems_filter and problems_filter != 'all':
filter_context['problems_filter'] = problems_filter
if theme_id and theme_id != 'all':
filter_context['theme_id'] = theme_id
if year and year != 'all':
filter_context['year'] = year
if individuals_filter and individuals_filter != 'all':
filter_context['individuals_filter'] = individuals_filter
# Field mapping for sorting (using column names from the unified query)
# Field mapping for sorting
field_mapping = {
'name': '"name"',
'parts': '"number_of_parts"',
'name': '"combined"."name"',
'parts': '"combined"."number_of_parts"',
'quantity': '"total_quantity"',
'missing': '"total_missing"',
'damaged': '"total_damaged"',
'sets': '"total_sets"'
'sets': '"total_sets"',
'individual': '"total_individual"'
}
# Use the base pagination method
@@ -173,7 +113,7 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
if current_app.config['RANDOM']:
order = 'RANDOM()'
else:
order = '"bricktracker_minifigures"."rowid" DESC'
order = '"combined"."rowid" DESC'
self.list(override_query=self.last_query, order=order, limit=limit)
+18 -15
View File
@@ -5,32 +5,35 @@ from .exceptions import ErrorException
def parse_set(set: str, /) -> str:
number, _, version = set.partition('-')
# Set number can be alphanumeric (e.g., "McDR6US", "10312", "COMCON035")
# Just validate it's not empty
if not number or number.strip() == '':
raise ErrorException('Set number cannot be empty')
# Clean up the number (trim whitespace)
number = number.strip()
# Version defaults to 1 if not provided
# Making sure both are integers
if version == '':
version = '1'
version = 1
# Version must be a valid number (but preserve leading zeros for minifigures)
try:
version_int = int(version)
number = int(number)
except Exception:
raise ErrorException('Number "{number}" is not a number'.format(
number=number,
))
try:
version = int(version)
except Exception:
raise ErrorException('Version "{version}" is not a number'.format(
version=version,
))
if version_int < 0:
raise ErrorException('Version "{version}" should be positive'.format(
# Make sure both are positive
if number < 0:
raise ErrorException('Number "{number}" should be positive'.format(
number=number,
))
if version < 0:
raise ErrorException('Version "{version}" should be positive'.format( # noqa: E501
version=version,
))
# Preserve original version string to keep leading zeros (important for minifigures like fig-000484)
return '{number}-{version}'.format(number=number, version=version)
+100 -136
View File
@@ -9,7 +9,6 @@ from .exceptions import ErrorException, NotFoundException
from .rebrickable_part import RebrickablePart
from .sql import BrickSQL
if TYPE_CHECKING:
from .individual_minifigure import IndividualMinifigure
from .minifigure import BrickMinifigure
from .set import BrickSet
from .socket import BrickSocket
@@ -24,7 +23,6 @@ class BrickPart(RebrickablePart):
# Queries
insert_query: str = 'part/insert'
update_on_refresh_query: str = 'part/update_on_refresh'
generic_query: str = 'part/select/generic'
select_query: str = 'part/select/specific'
@@ -34,7 +32,6 @@ class BrickPart(RebrickablePart):
*,
brickset: 'BrickSet | None' = None,
minifigure: 'BrickMinifigure | None' = None,
individual_minifigure: 'IndividualMinifigure | None' = None,
record: Row | dict[str, Any] | None = None
):
super().__init__(
@@ -43,12 +40,7 @@ class BrickPart(RebrickablePart):
record=record
)
self.individual_minifigure = individual_minifigure
if self.individual_minifigure is not None:
self.identifier = self.individual_minifigure.fields.id
self.kind = 'Individual Minifigure'
elif self.minifigure is not None:
if self.minifigure is not None:
self.identifier = self.minifigure.fields.figure
self.kind = 'Minifigure'
elif self.brickset is not None:
@@ -70,35 +62,13 @@ class BrickPart(RebrickablePart):
)
)
# Insert the rebrickable part into database first (parent record)
# This must happen before inserting into bricktracker_parts due to FK constraint
self.insert_rebrickable()
if refresh:
params = self.sql_parameters()
# Track this part in the refresh temp table (for orphan cleanup later)
BrickSQL().execute(
'part/track_refresh_part',
parameters=params,
defer=False
)
# Try to update existing part first (preserves checked, missing, and damaged states)
# Note: Cannot defer this because we need to check if rows were affected
rows, _ = BrickSQL().execute(
self.update_on_refresh_query,
parameters=params,
defer=False
)
# If no rows were updated, the part doesn't exist yet, so insert it
if rows == 0:
self.insert(commit=False)
else:
# Insert into bricktracker_parts database (child record)
if not refresh:
# Insert into database
self.insert(commit=False)
# Insert the rebrickable set into database
self.insert_rebrickable()
except Exception as e:
socket.fail(
message='Error while importing part {part} from {kind} {identifier}: {error}'.format( # noqa: E501
@@ -189,33 +159,6 @@ class BrickPart(RebrickablePart):
return self
# Select a specific part from an individual minifigure instance
def select_specific_individual_minifigure(
self,
individual_minifigure: 'IndividualMinifigure',
part: str,
color: int,
spare: int,
/,
) -> Self:
# Save the parameters to the fields
self.individual_minifigure = individual_minifigure
self.fields.part = part
self.fields.color = color
self.fields.spare = spare
if not self.select(override_query='individual_minifigure/part/select/specific'):
raise NotFoundException(
'Part {part} with color {color} (spare: {spare}) from individual minifigure {id} was not found in the database'.format( # noqa: E501
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
id=individual_minifigure.fields.id,
),
)
return self
# Update checked state for part walkthrough
def update_checked(self, json: Any | None, /) -> bool:
# Handle both direct 'checked' key and changer.js 'value' key format
@@ -236,56 +179,33 @@ class BrickPart(RebrickablePart):
return checked
# Update checked state for individual minifigure part
def update_checked_individual_minifigure(self, json: Any | None, /) -> bool:
# Handle both direct 'checked' key and changer.js 'value' key format
if json:
checked = json.get('checked', json.get('value', False))
else:
checked = False
checked = bool(checked)
self.fields.checked = checked
BrickSQL().execute_and_commit(
'individual_minifigure/part/update/checked',
parameters=self.sql_parameters()
)
return checked
# Compute the url for updating checked state
def url_for_checked(self, /) -> str:
# Different URL for individual minifigure part
if self.individual_minifigure is not None:
# Check if this is an individual minifigure (has minifigure with id field, no brickset)
if self.minifigure is not None and hasattr(self.minifigure.fields, 'id') and self.brickset is None:
# Individual minifigure part
return url_for(
'individual_minifigure.checked_part',
id=self.individual_minifigure.fields.id,
id=self.minifigure.fields.id,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
)
# Different URL for a set minifigure part
elif self.minifigure is not None:
return url_for(
'set.checked_part',
id=self.fields.id,
figure=self.minifigure.fields.figure,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
)
# Set part
# Set-based part (with or without minifigure)
if self.minifigure is not None:
figure = self.minifigure.fields.figure
else:
return url_for(
'set.checked_part',
id=self.fields.id,
figure=None,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
)
figure = None
return url_for(
'set.checked_part',
id=self.fields.id,
figure=figure,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
)
# Update a problematic part
def update_problem(self, problem: str, json: Any | None, /) -> int:
@@ -317,6 +237,65 @@ class BrickPart(RebrickablePart):
return amount
# Compute the url for problematic part
def url_for_problem(self, problem: str, /) -> str:
# Check if this is an individual minifigure (has minifigure with id field, no brickset)
if self.minifigure is not None and hasattr(self.minifigure.fields, 'id') and self.brickset is None:
# Individual minifigure part
return url_for(
'individual_minifigure.problem_part',
id=self.minifigure.fields.id,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
problem=problem,
)
# Set-based part (with or without minifigure)
if self.minifigure is not None:
figure = self.minifigure.fields.figure
else:
figure = None
return url_for(
'set.problem_part',
id=self.fields.id,
figure=figure,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
problem=problem,
)
# Select a specific part from an individual minifigure
def select_specific_individual_minifigure(
self,
minifigure: 'BrickMinifigure',
part: str,
color: int,
spare: int,
/,
) -> Self:
# Save the parameters to the fields
self.minifigure = minifigure
self.fields.id = minifigure.fields.id
self.fields.part = part
self.fields.color = color
self.fields.spare = spare
if not self.select(override_query='individual_minifigure/part/select/specific'):
raise NotFoundException(
'Part {part} with color {color} (spare: {spare}) from individual minifigure {figure} ({id}) was not found in the database'.format(
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
figure=self.minifigure.fields.figure,
id=self.minifigure.fields.id,
),
)
return self
# Update a problematic part for individual minifigure
def update_problem_individual_minifigure(self, problem: str, json: Any | None, /) -> int:
amount: str | int = json.get('value', '') # type: ignore
@@ -347,37 +326,22 @@ class BrickPart(RebrickablePart):
return amount
# Compute the url for problematic part
def url_for_problem(self, problem: str, /) -> str:
# Different URL for individual minifigure part
if self.individual_minifigure is not None:
return url_for(
'individual_minifigure.problem_part',
id=self.individual_minifigure.fields.id,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
problem=problem,
)
# Different URL for set minifigure part
elif self.minifigure is not None:
return url_for(
'set.problem_part',
id=self.fields.id,
figure=self.minifigure.fields.figure,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
problem=problem,
)
# Set part
# Update checked state for individual minifigure part
def update_checked_individual_minifigure(self, json: Any | None, /) -> bool:
# Handle both direct 'checked' key and changer.js 'value' key format
if json:
checked = json.get('checked', json.get('value', False))
else:
return url_for(
'set.problem_part',
id=self.fields.id,
figure=None,
part=self.fields.part,
color=self.fields.color,
spare=self.fields.spare,
problem=problem,
checked = False
checked = bool(checked)
# Update the field
self.fields.checked = checked
BrickSQL().execute_and_commit(
'individual_minifigure/part/update/checked',
parameters=self.sql_parameters()
)
return checked
+18 -102
View File
@@ -19,13 +19,13 @@ logger = logging.getLogger(__name__)
class BrickPartList(BrickRecordList[BrickPart]):
brickset: 'BrickSet | None'
minifigure: 'BrickMinifigure | None'
individual_minifigure: 'IndividualMinifigure | None'
order: str
# Queries
all_query: str = 'part/list/all'
all_by_owner_query: str = 'part/list/all_by_owner'
different_color_query = 'part/list/with_different_color'
individual_minifigure_query: str = 'individual_minifigure/part/list/from_instance'
last_query: str = 'part/list/last'
minifigure_query: str = 'part/list/from_minifigure'
problem_query: str = 'part/list/problem'
@@ -58,8 +58,8 @@ class BrickPartList(BrickRecordList[BrickPart]):
return self
# Load all parts with filters (owner, color, theme, year, individuals)
def all_filtered(self, owner_id: str | None = None, color_id: str | None = None, theme_id: str | None = None, year: str | None = None, individuals_filter: str | None = None, /) -> Self:
# Load all parts with filters (owner and/or color)
def all_filtered(self, owner_id: str | None = None, color_id: str | None = None, /) -> Self:
# Save the filter parameters
if owner_id is not None:
self.fields.owner_id = owner_id
@@ -74,15 +74,8 @@ class BrickPartList(BrickRecordList[BrickPart]):
# Prepare context for query
context = {}
# Hide spare parts from display if configured
if current_app.config.get('HIDE_SPARE_PARTS', False):
if current_app.config.get('SKIP_SPARE_PARTS', False):
context['skip_spare_parts'] = True
if theme_id and theme_id != 'all':
context['theme_id'] = theme_id
if year and year != 'all':
context['year'] = year
if individuals_filter and individuals_filter == 'only':
context['individuals_filter'] = True
# Load the parts from the database
self.list(override_query=query, **context)
@@ -94,9 +87,6 @@ class BrickPartList(BrickRecordList[BrickPart]):
self,
owner_id: str | None = None,
color_id: str | None = None,
theme_id: str | None = None,
year: str | None = None,
individuals_filter: str | None = None,
search_query: str | None = None,
page: int = 1,
per_page: int = 50,
@@ -113,16 +103,9 @@ class BrickPartList(BrickRecordList[BrickPart]):
if color_id and color_id != 'all':
filter_context['color_id'] = color_id
if theme_id and theme_id != 'all':
filter_context['theme_id'] = theme_id
if year and year != 'all':
filter_context['year'] = year
if individuals_filter and individuals_filter == 'only':
filter_context['individuals_filter'] = True
if search_query:
filter_context['search_query'] = search_query
# Hide spare parts from display if configured
if current_app.config.get('HIDE_SPARE_PARTS', False):
if current_app.config.get('SKIP_SPARE_PARTS', False):
filter_context['skip_spare_parts'] = True
# Field mapping for sorting
@@ -171,11 +154,6 @@ class BrickPartList(BrickRecordList[BrickPart]):
else:
minifigure = None
if hasattr(self, 'individual_minifigure'):
individual_minifigure = self.individual_minifigure
else:
individual_minifigure = None
# Prepare template context for filtering
context_vars = {}
if hasattr(self.fields, 'owner_id') and self.fields.owner_id is not None:
@@ -199,7 +177,6 @@ class BrickPartList(BrickRecordList[BrickPart]):
part = BrickPart(
brickset=brickset,
minifigure=minifigure,
individual_minifigure=individual_minifigure,
record=record,
)
@@ -217,13 +194,8 @@ class BrickPartList(BrickRecordList[BrickPart]):
self.brickset = brickset
self.minifigure = minifigure
# Prepare context for hiding spare parts if configured
context = {}
if current_app.config.get('HIDE_SPARE_PARTS', False):
context['skip_spare_parts'] = True
# Load the parts from the database
self.list(**context)
self.list()
return self
@@ -236,31 +208,22 @@ class BrickPartList(BrickRecordList[BrickPart]):
# Save the minifigure
self.minifigure = minifigure
# Prepare context for hiding spare parts if configured
context = {}
if current_app.config.get('HIDE_SPARE_PARTS', False):
context['skip_spare_parts'] = True
# Load the parts from the database
self.list(override_query=self.minifigure_query, **context)
self.list(override_query=self.minifigure_query)
return self
# Load parts from an individual minifigure instance
def from_individual_minifigure(
self,
individual_minifigure: 'IndividualMinifigure',
minifigure: 'BrickMinifigure',
/,
) -> Self:
from .individual_minifigure import IndividualMinifigure
# Save the minifigure
self.minifigure = minifigure
# Save the individual minifigure reference
self.individual_minifigure = individual_minifigure
# Load the parts for this individual minifigure instance
self.list(
override_query='individual_minifigure/part/list/from_instance'
)
# Load the parts from the database using the instance-specific query
self.list(override_query=self.individual_minifigure_query)
return self
@@ -284,30 +247,13 @@ class BrickPartList(BrickRecordList[BrickPart]):
return self
# Last added parts
def last(self, /, *, limit: int = 6) -> Self:
if current_app.config['RANDOM']:
order = 'RANDOM()'
else:
# Since bricktracker_parts has a composite primary key, it doesn't have a rowid
# Order by id DESC (which are UUIDs with timestamps) to get recent parts
order = '"combined"."id" DESC, "combined"."part" ASC'
context = {}
if current_app.config.get('HIDE_SPARE_PARTS', False):
context['skip_spare_parts'] = True
self.list(override_query=self.last_query, order=order, limit=limit, **context)
return self
# Load problematic parts
def problem(self, /) -> Self:
self.list(override_query=self.problem_query)
return self
def problem_filtered(self, owner_id: str | None = None, color_id: str | None = None, theme_id: str | None = None, year: str | None = None, storage_id: str | None = None, tag_id: str | None = None, /) -> Self:
def problem_filtered(self, owner_id: str | None = None, color_id: str | None = None, /) -> Self:
# Save the filter parameters for client-side filtering
if owner_id is not None:
self.fields.owner_id = owner_id
@@ -320,16 +266,7 @@ class BrickPartList(BrickRecordList[BrickPart]):
context['owner_id'] = owner_id
if color_id and color_id != 'all':
context['color_id'] = color_id
if theme_id and theme_id != 'all':
context['theme_id'] = theme_id
if year and year != 'all':
context['year'] = year
if storage_id and storage_id != 'all':
context['storage_id'] = storage_id
if tag_id and tag_id != 'all':
context['tag_id'] = tag_id
# Hide spare parts from display if configured
if current_app.config.get('HIDE_SPARE_PARTS', False):
if current_app.config.get('SKIP_SPARE_PARTS', False):
context['skip_spare_parts'] = True
# Load the problematic parts from the database
@@ -341,10 +278,6 @@ class BrickPartList(BrickRecordList[BrickPart]):
self,
owner_id: str | None = None,
color_id: str | None = None,
theme_id: str | None = None,
year: str | None = None,
storage_id: str | None = None,
tag_id: str | None = None,
search_query: str | None = None,
page: int = 1,
per_page: int = 50,
@@ -357,18 +290,9 @@ class BrickPartList(BrickRecordList[BrickPart]):
filter_context['owner_id'] = owner_id
if color_id and color_id != 'all':
filter_context['color_id'] = color_id
if theme_id and theme_id != 'all':
filter_context['theme_id'] = theme_id
if year and year != 'all':
filter_context['year'] = year
if storage_id and storage_id != 'all':
filter_context['storage_id'] = storage_id
if tag_id and tag_id != 'all':
filter_context['tag_id'] = tag_id
if search_query:
filter_context['search_query'] = search_query
# Hide spare parts from display if configured
if current_app.config.get('HIDE_SPARE_PARTS', False):
if current_app.config.get('SKIP_SPARE_PARTS', False):
filter_context['skip_spare_parts'] = True
# Field mapping for sorting
@@ -397,13 +321,11 @@ class BrickPartList(BrickRecordList[BrickPart]):
def sql_parameters(self, /) -> dict[str, Any]:
parameters: dict[str, Any] = super().sql_parameters()
# Set id
# Set id - prioritize brickset, then check minifigure
if self.brickset is not None:
parameters['id'] = self.brickset.fields.id
# Use the individual minifigure ID if present
if hasattr(self, 'individual_minifigure') and self.individual_minifigure is not None:
parameters['id'] = self.individual_minifigure.fields.id
elif self.minifigure is not None and hasattr(self.minifigure.fields, 'id'):
parameters['id'] = self.minifigure.fields.id
# Use the minifigure number if present,
if self.minifigure is not None:
@@ -472,13 +394,7 @@ class BrickPartList(BrickRecordList[BrickPart]):
# Process each part
number_of_parts: int = 0
skip_spares = current_app.config.get('SKIP_SPARE_PARTS', False)
for part in inventory:
# Skip spare parts if configured
if skip_spares and part.fields.spare:
continue
# Count the number of parts for minifigures
if minifigure is not None:
number_of_parts += part.fields.quantity
+5 -4
View File
@@ -7,6 +7,7 @@ from typing import Any, NamedTuple, TYPE_CHECKING
from urllib.parse import urljoin
from bs4 import BeautifulSoup
import cloudscraper
from flask import current_app, url_for
import requests
@@ -52,12 +53,12 @@ def get_peeron_scan_url(set_number: str, version_number: str):
def create_peeron_scraper():
"""Create a requests session configured for Peeron"""
session = requests.Session()
session.headers.update({
"""Create a cloudscraper instance configured for Peeron"""
scraper = cloudscraper.create_scraper()
scraper.headers.update({
"User-Agent": get_peeron_user_agent()
})
return session
return scraper
def get_peeron_cache_dir():
+5 -9
View File
@@ -4,6 +4,7 @@ import tempfile
import time
from typing import Any, TYPE_CHECKING
import cloudscraper
from flask import current_app
from PIL import Image
@@ -187,15 +188,10 @@ class PeeronPDF(object):
# Get target file path
def _get_target_path(self, /) -> str:
"""Get the full path where the PDF should be saved"""
folder = current_app.config['INSTRUCTIONS_FOLDER']
# If folder is absolute, use it directly
# Otherwise, make it relative to app root (not static folder)
if os.path.isabs(folder):
instructions_folder = folder
else:
instructions_folder = os.path.join(current_app.root_path, folder)
instructions_folder = os.path.join(
current_app.static_folder, # type: ignore
current_app.config['INSTRUCTIONS_FOLDER']
)
return os.path.join(instructions_folder, self.filename)
# Create BrickInstructions instance for the generated PDF
+10 -33
View File
@@ -53,7 +53,6 @@ class RebrickableImage(object):
if os.path.exists(path):
return
# Get the URL (this handles nil images via url() method)
url = self.url()
if url is None:
return
@@ -97,16 +96,9 @@ class RebrickableImage(object):
# Return the path depending on the objects provided
def path(self, /) -> str:
folder = self.folder()
# If folder is an absolute path (starts with /), use it directly
# Otherwise, make it relative to app root (current_app.root_path)
if folder.startswith('/'):
base_path = folder
else:
base_path = os.path.join(current_app.root_path, folder)
return os.path.join(
base_path,
current_app.static_folder, # type: ignore
self.folder(),
'{id}.{ext}'.format(id=self.id(), ext=self.extension),
)
@@ -124,11 +116,7 @@ class RebrickableImage(object):
else:
return self.minifigure.fields.image
# Handle set images - use nil placeholder if image is null
if self.set.fields.image is None:
return current_app.config['REBRICKABLE_IMAGE_NIL']
else:
return self.set.fields.image
return self.set.fields.image
# Return the name of the nil image file
@staticmethod
@@ -164,21 +152,10 @@ class RebrickableImage(object):
# _, extension = os.path.splitext(self.part_img_url)
extension = '.jpg'
# Determine which route to use based on folder path
# If folder contains 'data' (new structure), use data route
# Otherwise use static route (legacy - relative paths like 'parts', 'sets')
if 'data' in folder:
# Extract the folder type from the folder_name config key
# E.g., 'PARTS_FOLDER' -> 'parts', 'SETS_FOLDER' -> 'sets'
folder_type = folder_name.replace('_FOLDER', '').lower()
filename = '{name}{ext}'.format(name=name, ext=extension)
return url_for('data.serve_data_file', folder=folder_type, filename=filename)
else:
# Legacy: folder is relative to static/ (e.g., 'parts' or 'static/parts')
# Strip 'static/' prefix if present to avoid double /static/ in URL
folder_clean = folder.removeprefix('static/')
path = os.path.join(folder_clean, '{name}{ext}'.format(
name=name,
ext=extension,
))
return url_for('static', filename=path)
# Compute the path
path = os.path.join(folder, '{name}{ext}'.format(
name=name,
ext=extension,
))
return url_for('static', filename=path)
+6 -9
View File
@@ -14,6 +14,7 @@ if TYPE_CHECKING:
class RebrickableMinifigure(BrickRecord):
brickset: 'BrickSet | None'
# Queries
select_query: str = 'rebrickable/minifigure/select'
insert_query: str = 'rebrickable/minifigure/insert'
@@ -26,8 +27,10 @@ class RebrickableMinifigure(BrickRecord):
):
super().__init__()
# Save the brickset
self.brickset = brickset
# Ingest the record if it has one
if record is not None:
self.ingest(record)
@@ -59,6 +62,7 @@ class RebrickableMinifigure(BrickRecord):
return parameters
# Self url
def url(self, /) -> str:
return url_for(
'minifigure.details',
@@ -85,24 +89,17 @@ class RebrickableMinifigure(BrickRecord):
if current_app.config['REBRICKABLE_LINKS']:
try:
return current_app.config['REBRICKABLE_LINK_MINIFIGURE_PATTERN'].format( # noqa: E501
figure=self.fields.figure,
number=self.fields.figure,
)
except Exception:
pass
return ''
# Compute the url for the bricklink page
# Note: BrickLink uses different minifigure IDs than Rebrickable (e.g., 'adv010' vs 'fig-000359')
# Rebrickable API doesn't provide BrickLink minifigure IDs, so we can't generate valid links
def url_for_bricklink(self, /) -> str:
# BrickLink links disabled for minifigures - no ID mapping available
# Left function for later, if I find a way to implement it.
return ''
# Normalize from Rebrickable
@staticmethod
def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]:
# Extracting number
number = int(str(data['set_num'])[5:])
return {
+1 -4
View File
@@ -67,11 +67,8 @@ class RebrickablePart(BrickRecord):
def sql_parameters(self, /) -> dict[str, Any]:
parameters = super().sql_parameters()
# Individual minifigure id takes precedence
if hasattr(self, 'individual_minifigure') and self.individual_minifigure is not None:
parameters['id'] = self.individual_minifigure.fields.id
# Set id
elif self.brickset is not None:
if self.brickset is not None:
parameters['id'] = self.brickset.fields.id
# Use the minifigure number if present,
+3 -25
View File
@@ -95,18 +95,6 @@ class RebrickableSet(BrickRecord):
socket.auto_progress(message='Parsing set number')
set = parse_set(str(data['set']))
# Check if this is actually a minifigure (starts with fig-)
# If so, redirect to the minifigure handler
if set.startswith('fig-'):
from .individual_minifigure import IndividualMinifigure
# Transform data: minifigure handler expects 'figure' key instead of 'set'
minifig_data = data.copy()
minifig_data['figure'] = minifig_data.pop('set')
if from_download:
return IndividualMinifigure().download(socket, minifig_data)
else:
return IndividualMinifigure().load(socket, minifig_data)
socket.auto_progress(
message='Set {set}: loading from Rebrickable'.format(
set=set,
@@ -167,18 +155,9 @@ class RebrickableSet(BrickRecord):
# Return a short form of the Rebrickable set
def short(self, /, *, from_download: bool = False) -> dict[str, Any]:
# Use nil image URL if set image is null
image_url = self.fields.image
if image_url is None:
# Return path to nil.png from parts folder
image_url = RebrickableImage.static_url(
RebrickableImage.nil_name(),
'PARTS_FOLDER'
)
return {
'download': from_download,
'image': image_url,
'image': self.fields.image,
'name': self.fields.name,
'set': self.fields.set,
}
@@ -217,18 +196,17 @@ class RebrickableSet(BrickRecord):
@staticmethod
def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]:
# Extracting version and number
# Note: number can be alphanumeric (e.g., "McDR6US", "COMCON035")
number, _, version = str(data['set_num']).partition('-')
return {
'set': str(data['set_num']),
'number': str(number), # Keep as string to support alphanumeric sets
'number': int(number),
'version': int(version),
'name': str(data['name']),
'year': int(data['year']),
'theme_id': int(data['theme_id']),
'number_of_parts': int(data['num_parts']),
'image': str(data['set_img_url']) if data['set_img_url'] is not None else None,
'image': str(data['set_img_url']),
'url': str(data['set_url']),
'last_modified': str(data['last_modified_dt']),
}
-21
View File
@@ -1,4 +1,3 @@
from datetime import datetime
from sqlite3 import Row
from typing import Any, ItemsView
@@ -6,26 +5,6 @@ from .fields import BrickRecordFields
from .sql import BrickSQL
def format_timestamp(timestamp: float | str | None, format_key: str = 'PURCHASE_DATE_FORMAT') -> str:
if timestamp is not None:
from flask import current_app
# Handle legacy string dates stored in database (convert to numeric timestamp)
if isinstance(timestamp, str):
try:
# Try parsing as date string first
time = datetime.strptime(timestamp, '%Y/%m/%d')
except ValueError:
# If that fails, return the string as-is (shouldn't happen but safe fallback)
return timestamp
else:
# Normal case: numeric timestamp
time = datetime.fromtimestamp(timestamp)
return time.strftime(current_app.config.get(format_key, '%Y/%m/%d'))
return ''
# SQLite record
class BrickRecord(object):
select_query: str
+9 -112
View File
@@ -30,7 +30,6 @@ class BrickSet(RebrickableSet):
insert_query: str = 'set/insert'
update_purchase_date_query: str = 'set/update/purchase_date'
update_purchase_price_query: str = 'set/update/purchase_price'
update_description_query: str = 'set/update/description'
# Delete a set
def delete(self, /) -> None:
@@ -57,27 +56,8 @@ class BrickSet(RebrickableSet):
# Grabbing the refresh flag
refresh: bool = bool(data.get('refresh', False))
# Generate an UUID for self (or use existing ID if refreshing)
if refresh:
# Find the existing set by set number to get its ID
result = BrickSQL().raw_execute(
'SELECT "id" FROM "bricktracker_sets" WHERE "set" = :set',
{'set': self.fields.set}
).fetchone()
if result:
# Use existing set ID
self.fields.id = result['id']
else:
# If set doesn't exist in database, treat as new import
refresh = False
self.fields.id = str(uuid4())
else:
self.fields.id = str(uuid4())
# Insert the rebrickable set into database FIRST
# This must happen before inserting bricktracker_sets due to FK constraint
self.insert_rebrickable()
# Generate an UUID for self
self.fields.id = str(uuid4())
if not refresh:
# Save the storage
@@ -94,66 +74,25 @@ class BrickSet(RebrickableSet):
)
self.fields.purchase_location = purchase_location.fields.id
# Save the purchase date
purchase_date = data.get('purchase_date', None)
if purchase_date == '':
purchase_date = None
if purchase_date is not None:
try:
purchase_date = datetime.strptime(
purchase_date, '%Y/%m/%d'
).timestamp()
except Exception:
purchase_date = None
self.fields.purchase_date = purchase_date
# Save the purchase price
purchase_price = data.get('purchase_price', None)
if purchase_price == '':
purchase_price = None
if purchase_price is not None:
try:
purchase_price = float(purchase_price)
except Exception:
purchase_price = None
self.fields.purchase_price = purchase_price
# Save the description/notes
description = data.get('description', None)
if description == '':
description = None
self.fields.description = description
# Insert into database (deferred - will execute at final commit)
# All operations are atomic - if anything fails, nothing is committed
# Insert into database
self.insert(commit=False)
# Save the owners (deferred - will execute at final commit)
# Save the owners
owners: list[str] = list(data.get('owners', []))
for id in owners:
owner = BrickSetOwnerList.get(id)
owner.update_set_state(self, state=True, commit=False)
owner.update_set_state(self, state=True)
# Save the statuses (deferred - will execute at final commit)
statuses: list[str] = list(data.get('statuses', []))
for id in statuses:
status = BrickSetStatusList.get(id)
status.update_set_state(self, state=True, commit=False)
# Save the tags (deferred - will execute at final commit)
# Save the tags
tags: list[str] = list(data.get('tags', []))
for id in tags:
tag = BrickSetTagList.get(id)
tag.update_set_state(self, state=True, commit=False)
tag.update_set_state(self, state=True)
# If refreshing, prepare temp table for tracking parts across both set and minifigs
if refresh:
sql = BrickSQL()
sql.execute('part/create_temp_refresh_tracking_table', defer=False)
sql.execute('part/clear_temp_refresh_tracking_table', defer=False)
# Insert the rebrickable set into database
self.insert_rebrickable()
# Load the inventory
if not BrickPartList.download(socket, self, refresh=refresh):
@@ -163,15 +102,6 @@ class BrickSet(RebrickableSet):
if not BrickMinifigureList.download(socket, self, refresh=refresh):
return False
# If refreshing, clean up orphaned parts after all parts have been processed
if refresh:
# Delete orphaned parts (parts that weren't in the API response)
BrickSQL().execute(
'part/delete_untracked_parts',
parameters={'id': self.fields.id},
defer=False
)
# Commit the transaction to the database
socket.auto_progress(
message='Set {set}: writing to the database'.format(
@@ -423,36 +353,3 @@ class BrickSet(RebrickableSet):
# Update purchase price url
def url_for_purchase_price(self, /) -> str:
return url_for('set.update_purchase_price', id=self.fields.id)
# Update description
def update_description(self, json: Any | None, /) -> Any:
value = json.get('value', None) # type: ignore
if value == '':
value = None
self.fields.description = value
rows, _ = BrickSQL().execute_and_commit(
self.update_description_query,
parameters=self.sql_parameters()
)
if rows != 1:
raise DatabaseException('Could not update the description for set {set} ({id})'.format( # noqa: E501
set=self.fields.set,
id=self.fields.id,
))
# Info
logger.info('Description changed to "{value}" for set {set} ({id})'.format( # noqa: E501
value=value,
set=self.fields.set,
id=self.fields.id,
))
return value
# Update description url
def url_for_description(self, /) -> str:
return url_for('set.update_description', id=self.fields.id)
+18 -18
View File
@@ -36,7 +36,7 @@ class BrickSetList(BrickRecordList[BrickSet]):
using_minifigure_query: str = 'set/list/using_minifigure'
using_part_query: str = 'set/list/using_part'
using_storage_query: str = 'set/list/using_storage'
using_purchase_location_query: str = 'set/list/using_purchase_location'
without_storage_query: str = 'set/list/without_storage'
def __init__(self, /):
super().__init__()
@@ -93,15 +93,7 @@ class BrickSetList(BrickRecordList[BrickSet]):
# Convert theme name to theme ID for filtering
theme_id_filter = None
if theme_filter:
# Check if this is a NOT filter
if theme_filter.startswith('-'):
# Extract the actual theme value without the "-" prefix
actual_theme = theme_filter[1:]
theme_id = self._theme_name_to_id(actual_theme)
# Re-add the "-" prefix to the theme ID
theme_id_filter = f'-{theme_id}' if theme_id else None
else:
theme_id_filter = self._theme_name_to_id(theme_filter)
theme_id_filter = self._theme_name_to_id(theme_filter)
# Check if any filters are applied
has_filters = any([status_filter, theme_id_filter, owner_filter, purchase_location_filter, storage_filter, tag_filter, year_filter, duplicate_filter])
@@ -679,20 +671,17 @@ class BrickSetList(BrickRecordList[BrickSet]):
return self
# Sets using a purchase location
def using_purchase_location(self, purchase_location: BrickSetPurchaseLocation, /) -> Self:
# Save the parameters to the fields
self.fields.purchase_location = purchase_location.fields.id
# Load the sets from the database
self.list(override_query=self.using_purchase_location_query)
def without_storage(self, /) -> Self:
# Load sets with no storage
self.list(override_query=self.without_storage_query)
return self
# Helper to build the metadata lists
def set_metadata_lists(
as_class: bool = False
as_class: bool = False,
hardcoded_statuses_only: bool = False
) -> dict[
str,
Union[
@@ -704,9 +693,20 @@ def set_metadata_lists(
list[BrickSetTag]
]
]:
# Get all statuses
all_statuses = BrickSetStatusList.list(all=True)
# Filter to only hardcoded statuses if requested (for individual minifigures)
if hardcoded_statuses_only:
hardcoded_status_ids = ['minifigures_collected', 'set_checked', 'set_collected']
statuses = [s for s in all_statuses if s.fields.id in hardcoded_status_ids]
else:
statuses = all_statuses
return {
'brickset_owners': BrickSetOwnerList.list(),
'brickset_purchase_locations': BrickSetPurchaseLocationList.list(as_class=as_class), # noqa: E501
'brickset_statuses': statuses,
'brickset_storages': BrickSetStorageList.list(as_class=as_class),
'brickset_tags': BrickSetTagList.list(),
}
+3 -1
View File
@@ -5,8 +5,9 @@ from .metadata import BrickMetadata
class BrickSetOwner(BrickMetadata):
kind: str = 'owner'
# Set state endpoint
# Endpoints
set_state_endpoint: str = 'set.update_owner'
individual_minifigure_state_endpoint: str = 'individual_minifigure.update_owner'
# Queries
delete_query: str = 'set/metadata/owner/delete'
@@ -14,3 +15,4 @@ class BrickSetOwner(BrickMetadata):
select_query: str = 'set/metadata/owner/select'
update_field_query: str = 'set/metadata/owner/update/field'
update_set_state_query: str = 'set/metadata/owner/update/state'
update_individual_minifigure_state_query: str = 'individual_minifigure/metadata/owner/update/state'
+3
View File
@@ -15,6 +15,9 @@ class BrickSetOwnerList(BrickMetadataList[BrickSetOwner]):
# Queries
select_query = 'set/metadata/owner/list'
# Endpoints
set_state_endpoint: str = 'set.update_owner'
# Instantiate the list with the proper class
@classmethod
def new(cls, /, *, force: bool = False) -> Self:
+4 -9
View File
@@ -1,22 +1,17 @@
from .metadata import BrickMetadata
from flask import url_for
# Lego set purchase location metadata
class BrickSetPurchaseLocation(BrickMetadata):
kind: str = 'purchase location'
# Endpoints
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_purchase_location'
# Queries
delete_query: str = 'set/metadata/purchase_location/delete'
insert_query: str = 'set/metadata/purchase_location/insert'
select_query: str = 'set/metadata/purchase_location/select'
update_field_query: str = 'set/metadata/purchase_location/update/field'
update_set_value_query: str = 'set/metadata/purchase_location/update/value'
# Self url
def url(self, /) -> str:
return url_for(
'purchase_location.details',
id=self.fields.id,
)
update_individual_minifigure_value_query: str = 'individual_minifigure/metadata/purchase_location/update/value'
@@ -22,6 +22,9 @@ class BrickSetPurchaseLocationList(
# Set value endpoint
set_value_endpoint: str = 'set.update_purchase_location'
# Individual minifigure value endpoint
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_purchase_location'
# Load all purchase locations
@classmethod
def all(cls, /) -> Self:
+3 -1
View File
@@ -7,8 +7,9 @@ from .metadata import BrickMetadata
class BrickSetStatus(BrickMetadata):
kind: str = 'status'
# Set state endpoint
# Endpoints
set_state_endpoint: str = 'set.update_status'
individual_minifigure_state_endpoint: str = 'individual_minifigure.update_status'
# Queries
delete_query: str = 'set/metadata/status/delete'
@@ -16,6 +17,7 @@ class BrickSetStatus(BrickMetadata):
select_query: str = 'set/metadata/status/select'
update_field_query: str = 'set/metadata/status/update/field'
update_set_state_query: str = 'set/metadata/status/update/state'
update_individual_minifigure_state_query: str = 'individual_minifigure/metadata/status/update/state'
# Grab data from a form
def from_form(self, form: dict[str, str], /) -> Self:
+3
View File
@@ -15,6 +15,9 @@ class BrickSetStatusList(BrickMetadataList[BrickSetStatus]):
# Queries
select_query = 'set/metadata/status/list'
# Endpoints
set_state_endpoint: str = 'set.update_status'
# Filter the list of set status
def filter(self, all: bool = False) -> list[BrickSetStatus]:
return [
+4 -52
View File
@@ -1,6 +1,4 @@
from .metadata import BrickMetadata
from .exceptions import ErrorException
from .sql import BrickSQL
from flask import url_for
@@ -9,13 +7,16 @@ from flask import url_for
class BrickSetStorage(BrickMetadata):
kind: str = 'storage'
# Endpoints
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_storage'
# Queries
delete_query: str = 'set/metadata/storage/delete'
insert_query: str = 'set/metadata/storage/insert'
select_query: str = 'set/metadata/storage/select'
update_field_query: str = 'set/metadata/storage/update/field'
update_set_value_query: str = 'set/metadata/storage/update/value'
count_usage_query: str = 'set/metadata/storage/count_usage'
update_individual_minifigure_value_query: str = 'individual_minifigure/metadata/storage/update/value'
# Self url
def url(self, /) -> str:
@@ -23,52 +24,3 @@ class BrickSetStorage(BrickMetadata):
'storage.details',
id=self.fields.id,
)
# Delete from database - check if storage is in use first
def delete(self, /) -> None:
# Check if storage is being used
sql = BrickSQL()
result = sql.fetchone(self.count_usage_query, parameters={'id': self.fields.id})
if result:
sets_count = result[0]
minifigures_count = result[1]
parts_count = result[2]
lots_count = result[3]
total_count = sets_count + minifigures_count + parts_count + lots_count
if total_count > 0:
# Build error message with counts and link
error_parts = []
if sets_count > 0:
error_parts.append('{count} set{plural}'.format(
count=sets_count,
plural='s' if sets_count != 1 else ''
))
if minifigures_count > 0:
error_parts.append('{count} individual minifigure{plural}'.format(
count=minifigures_count,
plural='s' if minifigures_count != 1 else ''
))
if parts_count > 0:
error_parts.append('{count} individual part{plural}'.format(
count=parts_count,
plural='s' if parts_count != 1 else ''
))
if lots_count > 0:
error_parts.append('{count} part lot{plural}'.format(
count=lots_count,
plural='s' if lots_count != 1 else ''
))
error_message = 'Cannot delete storage location "{name}". You need to remove {items} from this storage before it can be deleted. <a href="{url}">View storage details</a>'.format(
name=self.fields.name,
items=', '.join(error_parts),
url=self.url()
)
raise ErrorException(error_message)
# If not in use, proceed with deletion
super().delete()
+3
View File
@@ -20,6 +20,9 @@ class BrickSetStorageList(BrickMetadataList[BrickSetStorage]):
# Set value endpoint
set_value_endpoint: str = 'set.update_storage'
# Individual minifigure value endpoint
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_storage'
# Load all storages
@classmethod
def all(cls, /) -> Self:
+3 -1
View File
@@ -5,8 +5,9 @@ from .metadata import BrickMetadata
class BrickSetTag(BrickMetadata):
kind: str = 'tag'
# Set state endpoint
# Endpoints
set_state_endpoint: str = 'set.update_tag'
individual_minifigure_state_endpoint: str = 'individual_minifigure.update_tag'
# Queries
delete_query: str = 'set/metadata/tag/delete'
@@ -14,3 +15,4 @@ class BrickSetTag(BrickMetadata):
select_query: str = 'set/metadata/tag/select'
update_field_query: str = 'set/metadata/tag/update/field'
update_set_state_query: str = 'set/metadata/tag/update/state'
update_individual_minifigure_state_query: str = 'individual_minifigure/metadata/tag/update/state'
+3
View File
@@ -15,6 +15,9 @@ class BrickSetTagList(BrickMetadataList[BrickSetTag]):
# Queries
select_query: str = 'set/metadata/tag/list'
# Endpoints
set_state_endpoint: str = 'set.update_tag'
# Instantiate the list with the proper class
@classmethod
def new(cls, /, *, force: bool = False) -> Self:
+3 -89
View File
@@ -18,8 +18,6 @@ logger = logging.getLogger(__name__)
MESSAGES: Final[dict[str, str]] = {
'COMPLETE': 'complete',
'CONNECT': 'connect',
'CREATE_LOT': 'create_lot',
'CREATE_BULK_INDIVIDUAL_PARTS': 'create_bulk_individual_parts',
'DISCONNECT': 'disconnect',
'DOWNLOAD_INSTRUCTIONS': 'download_instructions',
'DOWNLOAD_PEERON_PAGES': 'download_peeron_pages',
@@ -27,13 +25,9 @@ MESSAGES: Final[dict[str, str]] = {
'IMPORT_MINIFIGURE': 'import_minifigure',
'IMPORT_SET': 'import_set',
'LOAD_MINIFIGURE': 'load_minifigure',
'LOAD_PART': 'load_part',
'LOAD_PART_COLORS': 'load_part_colors',
'LOAD_PEERON_PAGES': 'load_peeron_pages',
'LOAD_SET': 'load_set',
'MINIFIGURE_LOADED': 'minifigure_loaded',
'PART_COLORS_LOADED': 'part_colors_loaded',
'PART_LOADED': 'part_loaded',
'PROGRESS': 'progress',
'SET_LOADED': 'set_loaded',
}
@@ -74,8 +68,6 @@ class BrickSocket(object):
)
# Inject CORS if a domain is defined
# Note: For reverse proxy deployments, leave BK_DOMAIN_NAME empty to allow all origins
# When empty, Socket.IO defaults to permissive CORS which works with reverse proxies
if app.config['DOMAIN_NAME'] != '':
kwargs['cors_allowed_origins'] = app.config['DOMAIN_NAME']
@@ -86,11 +78,6 @@ class BrickSocket(object):
**kwargs,
path=app.config['SOCKET_PATH'],
async_mode='gevent',
# Enable detailed logging in debug mode for troubleshooting
logger=app.config['DEBUG'],
# Ping/pong settings for mobile network resilience
ping_timeout=30, # Wait 30s for pong response before disconnecting
ping_interval=25, # Send ping every 25s to keep connection alive
)
# Store the socket in the app config
@@ -102,23 +89,9 @@ class BrickSocket(object):
self.connected()
@self.socket.on(MESSAGES['DISCONNECT'], namespace=self.namespace)
def disconnect(reason=None) -> None:
def disconnect() -> None:
self.disconnected()
@self.socket.on('connect_error', namespace=self.namespace)
def connect_error(data) -> None:
logger.error(f'Socket CONNECT_ERROR: {data}')
@self.socket.on_error(namespace=self.namespace)
def error_handler(e) -> None:
logger.error(f'Socket ERROR: {e}')
try:
user_agent = request.headers.get('User-Agent', 'unknown')
remote_addr = request.remote_addr
logger.error(f'Socket ERROR details: ip={remote_addr}, ua={user_agent[:80]}...')
except Exception:
pass
@self.socket.on(MESSAGES['DOWNLOAD_INSTRUCTIONS'], namespace=self.namespace) # noqa: E501
@authenticated_socket(self)
def download_instructions(data: dict[str, Any], /) -> None:
@@ -258,46 +231,6 @@ class BrickSocket(object):
from .individual_minifigure import IndividualMinifigure
IndividualMinifigure().load(self, data)
@self.socket.on(MESSAGES['LOAD_PART'], namespace=self.namespace)
def load_part(data: dict[str, Any], /) -> None:
logger.debug('Socket: LOAD_PART={data} (from: {fr})'.format(
data=data,
fr=request.sid, # type: ignore
))
from .individual_part import IndividualPart
IndividualPart().add(self, data)
@self.socket.on(MESSAGES['LOAD_PART_COLORS'], namespace=self.namespace)
def load_part_colors(data: dict[str, Any], /) -> None:
logger.debug('Socket: LOAD_PART_COLORS={data} (from: {fr})'.format(
data=data,
fr=request.sid, # type: ignore
))
from .individual_part import IndividualPart
IndividualPart().load_colors(self, data)
@self.socket.on(MESSAGES['CREATE_LOT'], namespace=self.namespace)
@rebrickable_socket(self)
def create_lot(data: dict[str, Any], /) -> None:
logger.debug('Socket: CREATE_LOT (from: {fr})'.format(
fr=request.sid, # type: ignore
))
from .individual_part_lot import IndividualPartLot
IndividualPartLot().create(self, data)
@self.socket.on(MESSAGES['CREATE_BULK_INDIVIDUAL_PARTS'], namespace=self.namespace)
@rebrickable_socket(self)
def create_bulk_individual_parts(data: dict[str, Any], /) -> None:
logger.debug('Socket: CREATE_BULK_INDIVIDUAL_PARTS (from: {fr})'.format(
fr=request.sid, # type: ignore
))
from .individual_part import IndividualPart
IndividualPart().create_bulk(self, data)
# Update the progress auto-incrementing
def auto_progress(
self,
@@ -323,32 +256,13 @@ class BrickSocket(object):
# Socket is connected
def connected(self, /) -> Tuple[str, int]:
# Get detailed connection info for debugging
try:
sid = request.sid # type: ignore
transport = request.environ.get('HTTP_UPGRADE', 'polling')
user_agent = request.headers.get('User-Agent', 'unknown')
remote_addr = request.remote_addr
# Check if it's likely a mobile device
is_mobile = any(x in user_agent.lower() for x in ['iphone', 'ipad', 'android', 'mobile'])
logger.info(
f'Socket CONNECTED: sid={sid}, transport={transport}, '
f'ip={remote_addr}, mobile={is_mobile}, ua={user_agent[:80]}...'
)
except Exception as e:
logger.warning(f'Socket connected but failed to get details: {e}')
logger.debug('Socket: client connected')
return '', 301
# Socket is disconnected
def disconnected(self, /) -> None:
    # Best-effort: log the departing client's session id; the sid may be
    # unavailable at disconnect time, so never raise from here.
    try:
        client_sid = request.sid  # type: ignore
        logger.info(f'Socket DISCONNECTED: sid={client_sid}')
    except Exception as e:
        logger.info(f'Socket disconnected (sid unavailable): {e}')

    logger.debug('Socket: client disconnected')
# Emit a message through the socket
def emit(self, name: str, *arg, all=False) -> None:
@@ -1,24 +0,0 @@
-- A bit unsafe as it does not use a prepared statement but it
-- should not be possible to inject anything through the {{ id }} context
-- Atomically remove an individual minifigure and every dependent row.
-- NOTE(review): metadata rows live in the consolidated set_* tables here,
-- sharing an id namespace with sets -- confirm ids cannot collide.
BEGIN TRANSACTION;
-- Delete associated parts first
DELETE FROM "bricktracker_individual_minifigure_parts"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete metadata from consolidated tables
DELETE FROM "bricktracker_set_owners"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_statuses"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_tags"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete the individual minifigure itself
DELETE FROM "bricktracker_individual_minifigures"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
COMMIT;
@@ -0,0 +1,19 @@
-- Remove an individual minifigure plus its per-minifigure metadata rows,
-- children first, using a bound :id parameter.
-- NOTE(review): no BEGIN/COMMIT here -- presumably the caller wraps these
-- statements in a transaction; confirm, otherwise a mid-way failure leaves
-- partial deletes behind.
-- Delete individual minifigure parts
DELETE FROM "bricktracker_individual_minifigure_parts"
WHERE "id" = :id;
-- Delete individual minifigure owners
DELETE FROM "bricktracker_individual_minifigure_owners"
WHERE "id" = :id;
-- Delete individual minifigure tags
DELETE FROM "bricktracker_individual_minifigure_tags"
WHERE "id" = :id;
-- Delete individual minifigure statuses
DELETE FROM "bricktracker_individual_minifigure_statuses"
WHERE "id" = :id;
-- Delete the individual minifigure itself
DELETE FROM "bricktracker_individual_minifigures"
WHERE "id" = :id;
@@ -4,16 +4,12 @@ INSERT OR IGNORE INTO "bricktracker_individual_minifigures" (
"quantity",
"description",
"storage",
"purchase_location",
"purchase_date",
"purchase_price"
"purchase_location"
) VALUES (
:id,
:figure,
:quantity,
:description,
:storage,
:purchase_location,
:purchase_date,
:purchase_price
:purchase_location
)
@@ -1,43 +0,0 @@
-- List all individual minifigures
-- Dynamic column lists for owners/statuses/tags are injected via the Jinja
-- {{ owners }}/{{ statuses }}/{{ tags }} context; order/limit/offset too.
-- NOTE(review): total_missing/total_damaged are hard-coded to 0 in this
-- listing -- presumably computed elsewhere when needed; confirm.
SELECT
"bricktracker_individual_minifigures"."id",
"bricktracker_individual_minifigures"."figure",
"bricktracker_individual_minifigures"."quantity",
"bricktracker_individual_minifigures"."description",
"bricktracker_individual_minifigures"."storage",
"bricktracker_individual_minifigures"."purchase_location",
"rebrickable_minifigures"."number",
"rebrickable_minifigures"."name",
"rebrickable_minifigures"."image",
"rebrickable_minifigures"."number_of_parts",
0 AS "total_missing",
0 AS "total_damaged"{% if owners %},
{{ owners }}{% endif %}{% if statuses %},
{{ statuses }}{% endif %}{% if tags %},
{{ tags }}{% endif %}
FROM "bricktracker_individual_minifigures"
INNER JOIN "rebrickable_minifigures"
ON "bricktracker_individual_minifigures"."figure" = "rebrickable_minifigures"."figure"
-- LEFT JOINs for metadata (owners, statuses, tags use separate dynamic column tables)
-- NOTE(review): joining the consolidated set_* tables on a shared id --
-- assumes at most one metadata row per id, else rows multiply; confirm.
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_set_owners"."id"
LEFT JOIN "bricktracker_set_statuses"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_set_statuses"."id"
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_set_tags"."id"
{% if order %}
ORDER BY {{ order }}
{% endif %}
{% if limit %}
LIMIT {{ limit }}
{% endif %}
{% if offset %}
OFFSET {{ offset }}
{% endif %}
@@ -1,48 +0,0 @@
-- Get all individual minifigure instances for a specific purchase location
-- :purchase_location may be NULL, hence IS NOT DISTINCT FROM in the WHERE.
SELECT
"bricktracker_individual_minifigures"."id",
"bricktracker_individual_minifigures"."figure",
"bricktracker_individual_minifigures"."quantity",
"bricktracker_individual_minifigures"."description",
"bricktracker_individual_minifigures"."storage",
"bricktracker_individual_minifigures"."purchase_location",
"rebrickable_minifigures"."number",
"rebrickable_minifigures"."name",
"rebrickable_minifigures"."image",
"rebrickable_minifigures"."number_of_parts",
"storage_meta"."name" AS "storage_name",
"purchase_meta"."name" AS "purchase_location_name",
-- No parts row at all means no problems: coalesce to 0
IFNULL("problem_join"."total_missing", 0) AS "total_missing",
IFNULL("problem_join"."total_damaged", 0) AS "total_damaged"
FROM "bricktracker_individual_minifigures"
INNER JOIN "rebrickable_minifigures"
ON "bricktracker_individual_minifigures"."figure" = "rebrickable_minifigures"."figure"
LEFT JOIN "bricktracker_metadata_storages" AS "storage_meta"
ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
-- Pre-aggregate missing/damaged counts per minifigure to avoid multiplying rows
LEFT JOIN (
SELECT
"bricktracker_individual_minifigure_parts"."id",
SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
FROM "bricktracker_individual_minifigure_parts"
GROUP BY "bricktracker_individual_minifigure_parts"."id"
) "problem_join"
ON "bricktracker_individual_minifigures"."id" = "problem_join"."id"
WHERE "bricktracker_individual_minifigures"."purchase_location" IS NOT DISTINCT FROM :purchase_location
{% if order %}
ORDER BY {{ order }}
{% else %}
-- Default: newest insertions first (SQLite rowid order)
ORDER BY "bricktracker_individual_minifigures"."rowid" DESC
{% endif %}
{% if limit %}
LIMIT {{ limit }}
{% endif %}
@@ -0,0 +1,10 @@
-- Upsert one owner flag (:state) for an individual minifigure (:id).
-- NOTE(review): "{{name}}" is a Jinja-injected column name (owners are
-- dynamic columns) -- it cannot be bound as a parameter, so the caller must
-- whitelist it against the table's actual columns.
INSERT INTO "bricktracker_individual_minifigure_owners" (
"id",
"{{name}}"
) VALUES (
:id,
:state
)
ON CONFLICT("id")
DO UPDATE SET "{{name}}" = :state
WHERE "bricktracker_individual_minifigure_owners"."id" IS NOT DISTINCT FROM :id
@@ -0,0 +1,10 @@
INSERT INTO "bricktracker_individual_minifigure_statuses" (
"id",
"{{name}}"
) VALUES (
:id,
:state
)
ON CONFLICT("id")
DO UPDATE SET "{{name}}" = :state
WHERE "bricktracker_individual_minifigure_statuses"."id" IS NOT DISTINCT FROM :id
@@ -0,0 +1,10 @@
INSERT INTO "bricktracker_individual_minifigure_tags" (
"id",
"{{name}}"
) VALUES (
:id,
:state
)
ON CONFLICT("id")
DO UPDATE SET "{{name}}" = :state
WHERE "bricktracker_individual_minifigure_tags"."id" IS NOT DISTINCT FROM :id
@@ -6,19 +6,12 @@ SELECT
"bricktracker_individual_minifigures"."description",
"bricktracker_individual_minifigures"."storage",
"bricktracker_individual_minifigures"."purchase_location",
"bricktracker_individual_minifigures"."purchase_date",
"bricktracker_individual_minifigures"."purchase_price",
"rebrickable_minifigures"."number",
"rebrickable_minifigures"."name",
"rebrickable_minifigures"."image",
"rebrickable_minifigures"."number_of_parts",
"storage_meta"."name" AS "storage_name",
"purchase_meta"."name" AS "purchase_location_name",
IFNULL("problem_join"."total_missing", 0) AS "total_missing",
IFNULL("problem_join"."total_damaged", 0) AS "total_damaged"{% if owners %},
{{ owners }}{% endif %}{% if statuses %},
{{ statuses }}{% endif %}{% if tags %},
{{ tags }}{% endif %}
"purchase_meta"."name" AS "purchase_location_name"{{ owners }}{{ statuses }}{{ tags }}
FROM "bricktracker_individual_minifigures"
INNER JOIN "rebrickable_minifigures"
@@ -30,23 +23,13 @@ ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
LEFT JOIN "bricktracker_individual_minifigure_owners"
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_individual_minifigure_owners"."id"
LEFT JOIN "bricktracker_set_statuses"
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_set_statuses"."id"
LEFT JOIN "bricktracker_individual_minifigure_statuses"
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_individual_minifigure_statuses"."id"
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_set_tags"."id"
LEFT JOIN (
SELECT
"bricktracker_individual_minifigure_parts"."id",
SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
FROM "bricktracker_individual_minifigure_parts"
GROUP BY "bricktracker_individual_minifigure_parts"."id"
) "problem_join"
ON "bricktracker_individual_minifigures"."id" = "problem_join"."id"
LEFT JOIN "bricktracker_individual_minifigure_tags"
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_individual_minifigure_tags"."id"
WHERE "bricktracker_individual_minifigures"."id" = :id
@@ -28,14 +28,14 @@ ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_set_owners"."id"
LEFT JOIN "bricktracker_individual_minifigure_owners"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_individual_minifigure_owners"."id"
LEFT JOIN "bricktracker_set_statuses"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_set_statuses"."id"
LEFT JOIN "bricktracker_individual_minifigure_statuses"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_individual_minifigure_statuses"."id"
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_set_tags"."id"
LEFT JOIN "bricktracker_individual_minifigure_tags"
ON "bricktracker_individual_minifigures"."id" = "bricktracker_individual_minifigure_tags"."id"
LEFT JOIN (
SELECT
@@ -3,7 +3,5 @@ SET
"quantity" = :quantity,
"description" = :description,
"storage" = :storage,
"purchase_location" = :purchase_location,
"purchase_date" = :purchase_date,
"purchase_price" = :purchase_price
"purchase_location" = :purchase_location
WHERE "id" = :id
@@ -1,17 +0,0 @@
-- A bit unsafe as it does not use a prepared statement but it
-- should not be possible to inject anything through the {{ id }} context
-- Atomically remove an individual part and its metadata rows.
-- NOTE(review): metadata lives in consolidated set_* tables, sharing an
-- id namespace with sets -- confirm ids cannot collide.
BEGIN TRANSACTION;
-- Delete metadata from consolidated tables
DELETE FROM "bricktracker_set_owners"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_tags"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete the individual part itself
DELETE FROM "bricktracker_individual_parts"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
COMMIT;
@@ -1,30 +0,0 @@
-- Insert a new individual part
-- All values are bound parameters; :id is the part instance's unique id and
-- :lot_id links the part to a lot (may be NULL for stand-alone parts --
-- presumed from the lot-variant insert; confirm).
INSERT INTO "bricktracker_individual_parts" (
"id",
"part",
"color",
"quantity",
"missing",
"damaged",
"checked",
"description",
"lot_id",
"storage",
"purchase_location",
"purchase_date",
"purchase_price"
) VALUES (
:id,
:part,
:color,
:quantity,
:missing,
:damaged,
:checked,
:description,
:lot_id,
:storage,
:purchase_location,
:purchase_date,
:purchase_price
)
@@ -1,30 +0,0 @@
-- Insert an individual part that belongs to a lot
-- Only id/part/color/quantity/lot_id are supplied; problem counters start
-- at 0 and the descriptive/purchase fields are NULL -- presumably those are
-- carried by the lot row instead; confirm against the lot model.
INSERT INTO "bricktracker_individual_parts" (
"id",
"part",
"color",
"quantity",
"missing",
"damaged",
"checked",
"description",
"storage",
"purchase_location",
"purchase_date",
"purchase_price",
"lot_id"
) VALUES (
:id,
:part,
:color,
:quantity,
0,
0,
0,
NULL,
NULL,
NULL,
NULL,
NULL,
:lot_id
)
@@ -1,42 +0,0 @@
-- List all individual parts
-- Joined to their Rebrickable reference row (part number + colour id);
-- order/limit/offset are Jinja-injected by the listing helper.
SELECT
"bricktracker_individual_parts"."id",
"bricktracker_individual_parts"."part",
"bricktracker_individual_parts"."color",
"bricktracker_individual_parts"."quantity",
"bricktracker_individual_parts"."missing",
"bricktracker_individual_parts"."damaged",
"bricktracker_individual_parts"."checked",
"bricktracker_individual_parts"."description",
"bricktracker_individual_parts"."lot_id",
"bricktracker_individual_parts"."storage",
"bricktracker_individual_parts"."purchase_location",
"bricktracker_individual_parts"."purchase_date",
"bricktracker_individual_parts"."purchase_price",
"rebrickable_parts"."name" AS "part_name",
"rebrickable_parts"."color_name",
"rebrickable_parts"."color_rgb",
"rebrickable_parts"."color_transparent",
"rebrickable_parts"."category",
"rebrickable_parts"."image",
"rebrickable_parts"."image_id",
"rebrickable_parts"."url" AS "part_url",
"rebrickable_parts"."bricklink_part_num",
"rebrickable_parts"."bricklink_color_id",
"rebrickable_parts"."bricklink_color_name"
FROM "bricktracker_individual_parts"
-- INNER JOIN: parts without a Rebrickable reference row are not listed
INNER JOIN "rebrickable_parts"
ON "bricktracker_individual_parts"."part" = "rebrickable_parts"."part"
AND "bricktracker_individual_parts"."color" = "rebrickable_parts"."color_id"
{% if order %}
ORDER BY {{ order }}
{% endif %}
{% if limit %}
LIMIT {{ limit }}
{% endif %}
{% if offset %}
OFFSET {{ offset }}
{% endif %}
@@ -1,31 +0,0 @@
-- Individual parts filtered by colour id (:color), with Rebrickable
-- reference data and resolved storage / purchase location names
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."color" = :color
ORDER BY "ip"."part"
@@ -1,31 +0,0 @@
-- Individual parts filtered by part number (:part), with Rebrickable
-- reference data and resolved storage / purchase location names
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."part" = :part
ORDER BY "ip"."color"
@@ -1,34 +0,0 @@
-- Stand-alone individual parts (not in any lot) matching :part and :color,
-- with Rebrickable data and resolved storage / purchase location names
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."lot_id",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."part" = :part
    AND "ip"."color" = :color
    AND "ip"."lot_id" IS NULL
ORDER BY "ip"."id"
@@ -1,31 +0,0 @@
-- Individual parts in a specific (non-NULL) storage bin (:storage);
-- a separate query handles parts with no storage assigned
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."storage" = :storage
ORDER BY "ip"."part", "ip"."color"
@@ -1,32 +0,0 @@
-- Individual parts with at least one missing or damaged piece,
-- with Rebrickable data and resolved storage / purchase location names
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."missing" > 0
    OR "ip"."damaged" > 0
ORDER BY "ip"."part", "ip"."color"
@@ -1,32 +0,0 @@
-- Stand-alone individual parts (not in a lot) for one purchase location;
-- :purchase_location may be NULL, hence the NULL-safe comparison
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."purchase_location" IS NOT DISTINCT FROM :purchase_location
    AND "ip"."lot_id" IS NULL
ORDER BY "ip"."part", "ip"."color"
@@ -1,31 +0,0 @@
-- Individual parts for one storage bin; :storage may be NULL, hence the
-- NULL-safe comparison in the WHERE clause
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."storage" IS NOT DISTINCT FROM :storage
ORDER BY "ip"."part", "ip"."color"
@@ -1,31 +0,0 @@
-- Individual parts that have no storage assigned (storage IS NULL),
-- with Rebrickable data and resolved purchase location names
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url",
    "st"."name" AS "storage_name",
    "pl"."name" AS "purchase_location_name"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
-- NULL-safe joins: storage / purchase_location may be NULL on the part row
LEFT JOIN "bricktracker_metadata_storages" AS "st"
    ON "ip"."storage" IS NOT DISTINCT FROM "st"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "pl"
    ON "ip"."purchase_location" IS NOT DISTINCT FROM "pl"."id"
WHERE "ip"."storage" IS NULL
ORDER BY "ip"."part", "ip"."color"
@@ -1,44 +0,0 @@
-- Select a specific individual part by UUID
-- Dynamic owner/status/tag columns are injected via the Jinja context.
-- NOTE(review): joins the consolidated set_* tables on a shared id --
-- assumes at most one metadata row per id; confirm.
SELECT
"bricktracker_individual_parts"."id",
"bricktracker_individual_parts"."part",
"bricktracker_individual_parts"."color",
"bricktracker_individual_parts"."quantity",
"bricktracker_individual_parts"."missing",
"bricktracker_individual_parts"."damaged",
"bricktracker_individual_parts"."checked",
"bricktracker_individual_parts"."description",
"bricktracker_individual_parts"."lot_id",
"bricktracker_individual_parts"."storage",
"bricktracker_individual_parts"."purchase_location",
"bricktracker_individual_parts"."purchase_date",
"bricktracker_individual_parts"."purchase_price",
"rebrickable_parts"."name" AS "part_name",
"rebrickable_parts"."color_name",
"rebrickable_parts"."color_rgb",
"rebrickable_parts"."color_transparent",
"rebrickable_parts"."category",
"rebrickable_parts"."image",
"rebrickable_parts"."image_id",
"rebrickable_parts"."url",
"rebrickable_parts"."bricklink_part_num",
"rebrickable_parts"."bricklink_color_id",
"rebrickable_parts"."bricklink_color_name"
{% if owners %},{{ owners }}{% endif %}
{% if statuses %},{{ statuses }}{% endif %}
{% if tags %},{{ tags }}{% endif %}
FROM "bricktracker_individual_parts"
INNER JOIN "rebrickable_parts"
ON "bricktracker_individual_parts"."part" = "rebrickable_parts"."part"
AND "bricktracker_individual_parts"."color" = "rebrickable_parts"."color_id"
-- NULL-safe joins against the consolidated metadata tables
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_individual_parts"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
LEFT JOIN "bricktracker_set_statuses"
ON "bricktracker_individual_parts"."id" IS NOT DISTINCT FROM "bricktracker_set_statuses"."id"
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_individual_parts"."id" IS NOT DISTINCT FROM "bricktracker_set_tags"."id"
WHERE "bricktracker_individual_parts"."id" = :id;
@@ -1,3 +0,0 @@
-- Set the checked value for one individual part
UPDATE "bricktracker_individual_parts"
SET "checked" = :checked
WHERE "id" = :id
@@ -1,3 +0,0 @@
-- Set the damaged count for one individual part
UPDATE "bricktracker_individual_parts"
SET "damaged" = :damaged
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update description for an individual part
UPDATE "bricktracker_individual_parts"
SET "description" = :description
WHERE "id" = :id;
@@ -1,4 +0,0 @@
-- Update a specific field in bricktracker_individual_parts
-- NOTE(review): "{{ field }}" is a Jinja-injected column name and cannot be
-- bound as a parameter -- the caller must whitelist it against known columns.
UPDATE "bricktracker_individual_parts"
SET "{{ field }}" = :value
WHERE "id" = :id
@@ -1,3 +0,0 @@
-- Set the missing count for one individual part
UPDATE "bricktracker_individual_parts"
SET "missing" = :missing
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update quantity for an individual part
UPDATE "bricktracker_individual_parts"
SET "quantity" = :quantity
WHERE "id" = :id;
@@ -1,9 +0,0 @@
-- Update the editable fields of one individual part in a single statement
UPDATE "bricktracker_individual_parts"
SET
"quantity" = :quantity,
"description" = :description,
"storage" = :storage,
"purchase_location" = :purchase_location,
"purchase_date" = :purchase_date,
"purchase_price" = :purchase_price
WHERE "id" = :id
@@ -1,22 +0,0 @@
-- A bit unsafe as it does not use a prepared statement but it
-- should not be possible to inject anything through the {{ id }} context
-- Atomically remove a lot, its member parts and its metadata rows.
BEGIN TRANSACTION;
-- Delete all individual parts associated with this lot
DELETE FROM "bricktracker_individual_parts"
WHERE "lot_id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete lot owners (using consolidated metadata table)
DELETE FROM "bricktracker_set_owners"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete lot tags (using consolidated metadata table)
DELETE FROM "bricktracker_set_tags"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete the lot itself
DELETE FROM "bricktracker_individual_part_lots"
WHERE "id" IS NOT DISTINCT FROM '{{ id }}';
COMMIT;
@@ -1,19 +0,0 @@
-- Insert a new individual part lot; all values are bound parameters
INSERT INTO "bricktracker_individual_part_lots" (
"id",
"name",
"description",
"created_date",
"storage",
"purchase_location",
"purchase_date",
"purchase_price"
) VALUES (
:id,
:name,
:description,
:created_date,
:storage,
:purchase_location,
:purchase_date,
:purchase_price
)
@@ -1,21 +0,0 @@
-- List all lots with resolved storage / purchase location names and a
-- per-lot member part count, newest first.
-- NOTE(review): non-aggregated columns with GROUP BY on the lot id rely on
-- SQLite's bare-column behaviour and the id being unique -- confirm.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
-- COUNT over a LEFT JOIN: empty lots yield 0, not NULL
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
LEFT JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,23 +0,0 @@
-- Lots containing at least one individual part matching :part and :color,
-- with resolved storage / purchase location names and a per-lot part count,
-- newest first.
-- Fix: dropped the redundant DISTINCT -- GROUP BY on the lot id already
-- guarantees one output row per lot, so DISTINCT only added a useless
-- dedup pass over already-unique rows.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
-- Counts only the matching parts, because the WHERE filter runs first
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
INNER JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
WHERE "bricktracker_individual_parts"."part" = :part
AND "bricktracker_individual_parts"."color" = :color
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,22 +0,0 @@
-- List individual part lots kept in a specific storage bin, newest first,
-- with resolved storage / purchase-location names and a per-lot part count.
-- Binds: :storage (storage id).
-- NOTE(review): this variant filters with a plain "=", so lots whose storage
-- is NULL can never match; a sibling query filters the same column with
-- IS NOT DISTINCT FROM — confirm the non-NULL-only behaviour is intended here.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
-- COUNT(col) ignores NULLs, so a lot with no parts (LEFT JOIN miss) counts 0
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
LEFT JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
WHERE "bricktracker_individual_part_lots"."storage" = :storage
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,26 +0,0 @@
-- Fetch every individual part belonging to the given lot, joined with its
-- Rebrickable catalogue data (name, colour, image, URL) for display.
-- Binds: :lot_id (lot primary key).
-- Parts with no matching Rebrickable row are omitted (INNER JOIN).
SELECT
    "ip"."id",
    "ip"."part",
    "ip"."color",
    "ip"."quantity",
    "ip"."missing",
    "ip"."damaged",
    "ip"."checked",
    "ip"."description",
    "ip"."storage",
    "ip"."purchase_location",
    "ip"."purchase_date",
    "ip"."purchase_price",
    "ip"."lot_id",
    "rp"."name",
    "rp"."color_name",
    "rp"."color_rgb",
    "rp"."color_transparent",
    "rp"."image",
    "rp"."url"
FROM "bricktracker_individual_parts" AS "ip"
INNER JOIN "rebrickable_parts" AS "rp"
    ON "ip"."part" = "rp"."part"
    AND "ip"."color" = "rp"."color_id"
WHERE "ip"."lot_id" = :lot_id
ORDER BY "rp"."name" ASC, "ip"."color" ASC
@@ -1,23 +0,0 @@
-- List lots that contain at least one problem part (missing or damaged),
-- newest first. The INNER JOIN drops lots with no parts at all.
-- Note: "part_count" counts only the problem rows, because the WHERE filter
-- runs before GROUP BY aggregation — it is not the lot's total part count.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
INNER JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
WHERE "bricktracker_individual_parts"."missing" > 0
OR "bricktracker_individual_parts"."damaged" > 0
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,22 +0,0 @@
-- List lots filtered by purchase location, newest first.
-- Binds: :purchase_location. IS NOT DISTINCT FROM makes the comparison
-- NULL-safe, so binding NULL selects lots with no purchase location.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
-- Lots with no parts keep a row (LEFT JOIN) and count 0
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
LEFT JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
WHERE "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM :purchase_location
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,22 +0,0 @@
-- List lots filtered by storage bin, newest first.
-- Binds: :storage. IS NOT DISTINCT FROM makes the comparison NULL-safe,
-- so binding NULL selects lots with no storage assigned.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
-- Lots with no parts keep a row (LEFT JOIN) and count 0
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
LEFT JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
WHERE "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM :storage
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,22 +0,0 @@
-- List lots that have no storage bin assigned (storage IS NULL), newest
-- first, with resolved metadata names and a per-lot part count.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name",
-- Lots with no parts keep a row (LEFT JOIN) and count 0
COUNT("bricktracker_individual_parts"."id") AS "part_count"
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
LEFT JOIN "bricktracker_individual_parts"
ON "bricktracker_individual_part_lots"."id" = "bricktracker_individual_parts"."lot_id"
WHERE "bricktracker_individual_part_lots"."storage" IS NULL
GROUP BY "bricktracker_individual_part_lots"."id"
ORDER BY "bricktracker_individual_part_lots"."created_date" DESC
@@ -1,28 +0,0 @@
-- Fetch a single individual part lot by id, with resolved storage and
-- purchase-location names. Binds: :id (lot primary key).
-- This is a Jinja template: the caller may splice in extra owner / tag
-- column expressions via the "owners" / "tags" variables.
SELECT
"bricktracker_individual_part_lots"."id",
"bricktracker_individual_part_lots"."name",
"bricktracker_individual_part_lots"."description",
"bricktracker_individual_part_lots"."created_date",
"bricktracker_individual_part_lots"."storage",
"bricktracker_individual_part_lots"."purchase_location",
"bricktracker_individual_part_lots"."purchase_date",
"bricktracker_individual_part_lots"."purchase_price",
"bricktracker_metadata_storages"."name" AS "storage_name",
"bricktracker_metadata_purchase_locations"."name" AS "purchase_location_name"
{% if owners %},{{ owners }}{% endif %}
{% if tags %},{{ tags }}{% endif %}
FROM "bricktracker_individual_part_lots"
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_individual_part_lots"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
LEFT JOIN "bricktracker_metadata_purchase_locations"
ON "bricktracker_individual_part_lots"."purchase_location" IS NOT DISTINCT FROM "bricktracker_metadata_purchase_locations"."id"
-- NOTE(review): lot ids are joined against the id-keyed *set* owner/tag
-- tables below — presumably lots reuse that metadata storage; confirm.
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_individual_part_lots"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
-- Note: Part lots don't have statuses, only owners and tags
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_individual_part_lots"."id" IS NOT DISTINCT FROM "bricktracker_set_tags"."id"
WHERE "bricktracker_individual_part_lots"."id" = :id
@@ -1,4 +0,0 @@
-- Update individual part lot description
-- Binds: :description (new value, may be NULL), :id (lot primary key)
UPDATE "bricktracker_individual_part_lots"
SET "description" = :description
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update individual part lot name
-- Binds: :name (new value, may be NULL), :id (lot primary key)
UPDATE "bricktracker_individual_part_lots"
SET "name" = :name
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update individual part lot purchase date
-- Binds: :purchase_date (REAL — presumably an epoch/serial date; confirm
-- units against the writer), :id (lot primary key)
UPDATE "bricktracker_individual_part_lots"
SET "purchase_date" = :purchase_date
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update individual part lot purchase location
-- Binds: :purchase_location (metadata purchase-location id or NULL),
-- :id (lot primary key)
UPDATE "bricktracker_individual_part_lots"
SET "purchase_location" = :purchase_location
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update individual part lot purchase price
-- Binds: :purchase_price (REAL or NULL), :id (lot primary key)
UPDATE "bricktracker_individual_part_lots"
SET "purchase_price" = :purchase_price
WHERE "id" = :id
@@ -1,4 +0,0 @@
-- Update individual part lot storage
-- Binds: :storage (metadata storage id or NULL), :id (lot primary key)
UPDATE "bricktracker_individual_part_lots"
SET "storage" = :storage
WHERE "id" = :id
+1 -1
View File
@@ -1,4 +1,4 @@
-- description: Performance optimization indexes
-- Migration 0019: Performance optimization indexes
-- High-impact composite index for problem parts aggregation
-- Used in set listings, statistics, and problem reports
+123 -49
View File
@@ -1,58 +1,132 @@
-- description: Change set number column from INTEGER to TEXT to support alphanumeric set numbers
-- Migration 0020: Add individual minifigures and individual parts tables
-- Temporarily disable foreign key constraints for this migration
-- This is necessary because we're recreating a table that other tables reference
-- We verify integrity at the end to ensure safety
PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
-- Create new table with TEXT number column
CREATE TABLE "rebrickable_sets_new" (
"set" TEXT NOT NULL,
"number" TEXT NOT NULL,
"version" INTEGER NOT NULL,
"name" TEXT NOT NULL,
"year" INTEGER NOT NULL,
"theme_id" INTEGER NOT NULL,
"number_of_parts" INTEGER NOT NULL,
"image" TEXT,
"url" TEXT,
"last_modified" TEXT,
PRIMARY KEY("set")
-- Individual minifigures table - tracks individual minifigures not associated with sets
-- One row per owned minifigure entry; "quantity" stacks duplicates.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigures" (
"id" TEXT NOT NULL,
"figure" TEXT NOT NULL,
"quantity" INTEGER NOT NULL DEFAULT 1,
"description" TEXT,
"storage" TEXT, -- Storage bin location
"purchase_date" REAL, -- Purchase date (REAL; presumably epoch/serial — confirm units)
"purchase_location" TEXT, -- Purchase location
"purchase_price" REAL, -- Purchase price
PRIMARY KEY("id"),
FOREIGN KEY("figure") REFERENCES "rebrickable_minifigures"("figure"),
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
);
-- Copy all data from old table to new table
-- Cast INTEGER number to TEXT explicitly
INSERT INTO "rebrickable_sets_new"
SELECT
"set",
CAST("number" AS TEXT),
"version",
"name",
"year",
"theme_id",
"number_of_parts",
"image",
"url",
"last_modified"
FROM "rebrickable_sets";
-- Individual minifigure statuses
-- One row per minifigure id; boolean flags default to off.
-- NOTE(review): the column names mirror the *set* status columns
-- (status_set_checked etc.) — confirm these names are intended for
-- standalone minifigures.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_statuses" (
"id" TEXT NOT NULL,
"status_minifigures_collected" BOOLEAN NOT NULL DEFAULT 0,
"status_set_checked" BOOLEAN NOT NULL DEFAULT 0,
"status_set_collected" BOOLEAN NOT NULL DEFAULT 0,
PRIMARY KEY("id"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id")
);
-- Drop old table
DROP TABLE "rebrickable_sets";
-- Individual minifigure owners
-- Key-only table: per-owner boolean columns are added by a later migration.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_owners" (
"id" TEXT NOT NULL,
PRIMARY KEY("id"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id")
);
-- Rename new table to original name
ALTER TABLE "rebrickable_sets_new" RENAME TO "rebrickable_sets";
-- Individual minifigure tags
-- Key-only table: per-tag boolean columns are added by a later migration.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_tags" (
"id" TEXT NOT NULL,
PRIMARY KEY("id"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id")
);
-- Recreate the index
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_number_version
ON rebrickable_sets(number, version);
-- Parts table for individual minifigures - tracks constituent parts
-- Composite key (id, part, color, spare): one row per distinct part/colour,
-- spares kept separate from regular parts.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_parts" (
"id" TEXT NOT NULL,
"part" TEXT NOT NULL,
"color" INTEGER NOT NULL,
"spare" BOOLEAN NOT NULL,
"quantity" INTEGER NOT NULL,
"element" INTEGER,
"rebrickable_inventory" INTEGER NOT NULL, -- presumably the Rebrickable inventory id — confirm
"missing" INTEGER NOT NULL DEFAULT 0,
"damaged" INTEGER NOT NULL DEFAULT 0,
"checked" BOOLEAN DEFAULT 0,
PRIMARY KEY("id", "part", "color", "spare"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id"),
FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id")
);
-- Verify foreign key integrity before committing
-- This ensures we haven't broken any references
PRAGMA foreign_key_check;
-- Individual parts table - tracks individual parts not associated with sets
-- One row per owned part entry; "quantity" stacks duplicates.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_parts" (
"id" TEXT NOT NULL,
"part" TEXT NOT NULL,
"color" INTEGER NOT NULL,
"quantity" INTEGER NOT NULL DEFAULT 1,
"description" TEXT,
"storage" TEXT, -- Storage bin location
"purchase_date" REAL, -- Purchase date (REAL; presumably epoch/serial — confirm units)
"purchase_location" TEXT, -- Purchase location
"purchase_price" REAL, -- Purchase price
PRIMARY KEY("id"),
FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id"),
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
);
COMMIT;
-- Individual part owners
-- Key-only table: per-owner boolean columns are added by a later migration.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_owners" (
"id" TEXT NOT NULL,
PRIMARY KEY("id"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_parts"("id")
);
-- Re-enable foreign key constraints
PRAGMA foreign_keys=ON;
-- Individual part tags
-- Key-only table: per-tag boolean columns are added by a later migration.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_tags" (
"id" TEXT NOT NULL,
PRIMARY KEY("id"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_parts"("id")
);
-- Individual part statuses
-- NOTE(review): the status column names mirror the *set* status columns —
-- confirm these names are intended for standalone parts.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_statuses" (
"id" TEXT NOT NULL,
"status_minifigures_collected" BOOLEAN NOT NULL DEFAULT 0,
"status_set_checked" BOOLEAN NOT NULL DEFAULT 0,
"status_set_collected" BOOLEAN NOT NULL DEFAULT 0,
PRIMARY KEY("id"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_parts"("id")
);
-- Indexes for individual minifigures
-- Cover the columns the listing queries filter on.
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_figure
ON bricktracker_individual_minifigures(figure);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_storage
ON bricktracker_individual_minifigures(storage);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_purchase_location
ON bricktracker_individual_minifigures(purchase_location);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_purchase_date
ON bricktracker_individual_minifigures(purchase_date);
-- Indexes for individual minifigure parts
-- Composite index supports problem-part (missing/damaged) aggregation per id.
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigure_parts_id_missing_damaged
ON bricktracker_individual_minifigure_parts(id, missing, damaged);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigure_parts_part_color
ON bricktracker_individual_minifigure_parts(part, color);
-- Indexes for individual parts
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_part_color
ON bricktracker_individual_parts(part, color);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_storage
ON bricktracker_individual_parts(storage);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_location
ON bricktracker_individual_parts(purchase_location);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_date
ON bricktracker_individual_parts(purchase_date);
+17 -82
View File
@@ -1,88 +1,23 @@
-- description: Add individual minifigures and individual parts tables
-- Migration 0021: Add existing owner/tag columns to individual minifigure and individual part metadata tables
-- Individual minifigures table - tracks individual minifigures not associated with sets
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigures" (
"id" TEXT NOT NULL,
"figure" TEXT NOT NULL,
"quantity" INTEGER NOT NULL DEFAULT 1,
"description" TEXT,
"storage" TEXT, -- Storage bin location
"purchase_date" REAL, -- Purchase date
"purchase_location" TEXT, -- Purchase location
"purchase_price" REAL, -- Purchase price
PRIMARY KEY("id"),
FOREIGN KEY("figure") REFERENCES "rebrickable_minifigures"("figure"),
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
);
-- Add owner columns to individual minifigure owners table
-- NOTE(review): the UUID suffixes appear to be the ids of owners already
-- configured in this instance's metadata — these column names are
-- installation-specific; confirm before reusing this migration elsewhere.
ALTER TABLE "bricktracker_individual_minifigure_owners"
ADD COLUMN "owner_32479d0a_cd3c_43c6_aa16_b3f378915b13" BOOLEAN NOT NULL DEFAULT 0;
-- Metadata for individual minifigures: use bricktracker_set_owners, bricktracker_set_tags, bricktracker_set_statuses tables
ALTER TABLE "bricktracker_individual_minifigure_owners"
ADD COLUMN "owner_2f07518d_40e1_4279_b0d0_aa339f195cbf" BOOLEAN NOT NULL DEFAULT 0;
-- Parts table for individual minifigures - tracks constituent parts
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_parts" (
"id" TEXT NOT NULL,
"part" TEXT NOT NULL,
"color" INTEGER NOT NULL,
"spare" BOOLEAN NOT NULL,
"quantity" INTEGER NOT NULL,
"element" INTEGER,
"rebrickable_inventory" INTEGER NOT NULL,
"missing" INTEGER NOT NULL DEFAULT 0,
"damaged" INTEGER NOT NULL DEFAULT 0,
"checked" BOOLEAN DEFAULT 0,
PRIMARY KEY("id", "part", "color", "spare"),
FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id"),
FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id")
);
-- Add tag columns to individual minifigure tags table
-- NOTE(review): UUID suffix is an existing tag id — installation-specific.
ALTER TABLE "bricktracker_individual_minifigure_tags"
ADD COLUMN "tag_b1b5c316_5caf_4b82_a085_ac4c7ab9b8db" BOOLEAN NOT NULL DEFAULT 0;
-- Individual parts table - tracks individual parts not associated with sets
CREATE TABLE IF NOT EXISTS "bricktracker_individual_parts" (
"id" TEXT NOT NULL,
"part" TEXT NOT NULL,
"color" INTEGER NOT NULL,
"quantity" INTEGER NOT NULL DEFAULT 1,
"description" TEXT,
"storage" TEXT, -- Storage bin location
"purchase_date" REAL, -- Purchase date
"purchase_location" TEXT, -- Purchase location
"purchase_price" REAL, -- Purchase price
PRIMARY KEY("id"),
FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id"),
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
);
-- Add owner columns to individual part owners table
-- NOTE(review): UUID suffixes are existing owner ids — installation-specific.
ALTER TABLE "bricktracker_individual_part_owners"
ADD COLUMN "owner_32479d0a_cd3c_43c6_aa16_b3f378915b13" BOOLEAN NOT NULL DEFAULT 0;
-- Metadata for individual parts: use bricktracker_set_owners, bricktracker_set_tags, bricktracker_set_statuses tables
ALTER TABLE "bricktracker_individual_part_owners"
ADD COLUMN "owner_2f07518d_40e1_4279_b0d0_aa339f195cbf" BOOLEAN NOT NULL DEFAULT 0;
-- Indexes for individual minifigures
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_figure
ON bricktracker_individual_minifigures(figure);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_storage
ON bricktracker_individual_minifigures(storage);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_purchase_location
ON bricktracker_individual_minifigures(purchase_location);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_purchase_date
ON bricktracker_individual_minifigures(purchase_date);
-- Indexes for individual minifigure parts
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigure_parts_id_missing_damaged
ON bricktracker_individual_minifigure_parts(id, missing, damaged);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigure_parts_part_color
ON bricktracker_individual_minifigure_parts(part, color);
-- Indexes for individual parts
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_part_color
ON bricktracker_individual_parts(part, color);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_storage
ON bricktracker_individual_parts(storage);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_location
ON bricktracker_individual_parts(purchase_location);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_date
ON bricktracker_individual_parts(purchase_date);
-- Add tag columns to individual part tags table
-- NOTE(review): UUID suffix is an existing tag id — installation-specific.
ALTER TABLE "bricktracker_individual_part_tags"
ADD COLUMN "tag_b1b5c316_5caf_4b82_a085_ac4c7ab9b8db" BOOLEAN NOT NULL DEFAULT 0;
-91
View File
@@ -1,91 +0,0 @@
-- description: Add individual part lots system for bulk/cart adding of parts
-- Overview: (1) create the lots table, (2) add missing/damaged/checked to
-- individual parts, (3) recreate individual parts to attach a lot_id FK
-- (SQLite cannot ADD CONSTRAINT), (4) recreate the indexes. All inside one
-- transaction.
-- NOTE(review): unlike the sets-table migration, this recreation does not
-- toggle PRAGMA foreign_keys off — confirm FK enforcement state during the
-- DROP/RENAME sequence below.
BEGIN TRANSACTION;
-- Create individual part lots table
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_lots" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT,
"description" TEXT,
"created_date" REAL NOT NULL,
"storage" TEXT,
"purchase_location" TEXT,
"purchase_date" REAL,
"purchase_price" REAL,
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id") ON DELETE SET NULL,
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id") ON DELETE SET NULL
);
-- Create index for faster lookups
CREATE INDEX IF NOT EXISTS "idx_individual_part_lots_created_date"
ON "bricktracker_individual_part_lots"("created_date");
-- Add missing/damaged/checked fields to individual parts table
-- (added here so the copy below can SELECT them)
ALTER TABLE "bricktracker_individual_parts"
ADD COLUMN "missing" INTEGER NOT NULL DEFAULT 0;
ALTER TABLE "bricktracker_individual_parts"
ADD COLUMN "damaged" INTEGER NOT NULL DEFAULT 0;
ALTER TABLE "bricktracker_individual_parts"
ADD COLUMN "checked" BOOLEAN NOT NULL DEFAULT 0;
-- Add lot_id column to individual parts table with foreign key constraint
-- Note: SQLite doesn't support ALTER TABLE ADD CONSTRAINT for FK, so we need to recreate the table
-- Create new table with FK constraint
CREATE TABLE "bricktracker_individual_parts_new" (
"id" TEXT NOT NULL,
"part" TEXT NOT NULL,
"color" INTEGER NOT NULL,
"quantity" INTEGER NOT NULL DEFAULT 1,
"description" TEXT,
"storage" TEXT,
"purchase_date" REAL,
"purchase_location" TEXT,
"purchase_price" REAL,
"missing" INTEGER NOT NULL DEFAULT 0,
"damaged" INTEGER NOT NULL DEFAULT 0,
"checked" BOOLEAN NOT NULL DEFAULT 0,
"lot_id" TEXT,
PRIMARY KEY("id"),
FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id"),
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id"),
FOREIGN KEY("lot_id") REFERENCES "bricktracker_individual_part_lots"("id") ON DELETE SET NULL
);
-- Copy existing data (set lot_id to NULL for all existing parts)
-- Explicit column lists on both sides keep the copy safe against column order
INSERT INTO "bricktracker_individual_parts_new"
(id, part, color, quantity, description, storage, purchase_date, purchase_location, purchase_price, missing, damaged, checked, lot_id)
SELECT
id, part, color, quantity, description, storage, purchase_date, purchase_location, purchase_price, missing, damaged, checked, NULL
FROM "bricktracker_individual_parts";
-- Drop old table
DROP TABLE "bricktracker_individual_parts";
-- Rename new table
ALTER TABLE "bricktracker_individual_parts_new" RENAME TO "bricktracker_individual_parts";
-- Recreate existing indexes
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_part_color
ON bricktracker_individual_parts(part, color);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_storage
ON bricktracker_individual_parts(storage);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_location
ON bricktracker_individual_parts(purchase_location);
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_date
ON bricktracker_individual_parts(purchase_date);
-- Create lot_id index
CREATE INDEX IF NOT EXISTS "idx_individual_parts_lot_id"
ON "bricktracker_individual_parts"("lot_id");
-- Metadata for individual part lots: use bricktracker_set_owners and bricktracker_set_tags tables
-- Note: Part lots don't have statuses, only owners and tags
COMMIT;
-13
View File
@@ -1,13 +0,0 @@
-- description: Add missing indexes for individual part lots optimization
-- Idempotent (IF NOT EXISTS); both indexes back the lot listing filters.
BEGIN TRANSACTION;
-- Add storage index for lots table (for filtering by storage)
CREATE INDEX IF NOT EXISTS "idx_individual_part_lots_storage"
ON "bricktracker_individual_part_lots"("storage");
-- Add purchase location index for lots table (for filtering by purchase location)
CREATE INDEX IF NOT EXISTS "idx_individual_part_lots_purchase_location"
ON "bricktracker_individual_part_lots"("purchase_location");
COMMIT;
-16
View File
@@ -1,16 +0,0 @@
-- description: Create rebrickable_colors translation table for BrickLink color ID mapping
-- This table caches color information from Rebrickable API to avoid repeated API calls
-- and provides mapping between Rebrickable and BrickLink color IDs
-- BrickLink columns are nullable: a Rebrickable color may have no mapping.
CREATE TABLE IF NOT EXISTS "rebrickable_colors" (
"color_id" INTEGER PRIMARY KEY,
"name" TEXT NOT NULL,
"rgb" TEXT,
"is_trans" BOOLEAN,
"bricklink_color_id" INTEGER,
"bricklink_color_name" TEXT
);
-- Create index for faster lookups
-- (reverse lookups: BrickLink id -> Rebrickable color)
CREATE INDEX IF NOT EXISTS "idx_rebrickable_colors_bricklink"
ON "rebrickable_colors"("bricklink_color_id");

Some files were not shown because too many files have changed in this diff Show More