Merge pull request 'release/1.3' (#116) from release/1.3 into master

Reviewed-on: #116
This commit was merged in pull request #116.
This commit is contained in:
2025-12-18 01:41:28 +01:00
142 changed files with 11107 additions and 747 deletions

View File

@@ -1,3 +1,23 @@
# ================================================================================================
# BrickTracker Configuration File
# ================================================================================================
#
# FILE LOCATION (v1.3+):
# ----------------------
# This file can be placed in two locations:
# 1. data/.env (RECOMMENDED) - Included in data volume backup, settings persist via admin panel
# 2. .env (root) - Backward compatible
#
# Priority: data/.env > .env (root)
#
# The application automatically detects and uses the correct location at runtime.
#
# For Docker:
# - Recommended: Place this file as data/.env (included in data volume)
# - Backward compatible: Keep as .env in root (add "env_file: .env" to compose.yaml)
#
# ================================================================================================
#
# Note on *_DEFAULT_ORDER # Note on *_DEFAULT_ORDER
# If set, it will append a direct ORDER BY <whatever you set> to the SQL query # If set, it will append a direct ORDER BY <whatever you set> to the SQL query
# while listing objects. You can look at the structure of the SQLite database to # while listing objects. You can look at the structure of the SQLite database to
@@ -32,15 +52,20 @@
# Default: https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color} # Default: https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}
# BK_BRICKLINK_LINK_PART_PATTERN= # BK_BRICKLINK_LINK_PART_PATTERN=
# Optional: Pattern of the link to Bricklink for a set. Will be passed to Python .format()
# Supports {set_num} parameter. Set numbers in format like '10255-1' are used.
# Default: https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num}
# BK_BRICKLINK_LINK_SET_PATTERN=
# Optional: Display Bricklink links wherever applicable # Optional: Display Bricklink links wherever applicable
# Default: false # Default: false
# BK_BRICKLINK_LINKS=true # BK_BRICKLINK_LINKS=true
# Optional: Path to the database. # Optional: Path to the database, relative to '/app/' folder
# Useful if you need it mounted in a Docker volume. Keep in mind that it will not # Useful if you need it mounted in a Docker volume. Keep in mind that it will not
# do any check on the existence of the path, or if it is dangerous. # do any check on the existence of the path, or if it is dangerous.
# Default: ./app.db # Default: data/app.db
# BK_DATABASE_PATH=/var/lib/bricktracker/app.db # BK_DATABASE_PATH=data/app.db
# Optional: Format of the timestamp added to the database file when downloading it # Optional: Format of the timestamp added to the database file when downloading it
# Check https://docs.python.org/3/library/time.html#time.strftime for format details # Check https://docs.python.org/3/library/time.html#time.strftime for format details
@@ -81,9 +106,9 @@
# Default: .pdf # Default: .pdf
# BK_INSTRUCTIONS_ALLOWED_EXTENSIONS=.pdf, .docx, .png # BK_INSTRUCTIONS_ALLOWED_EXTENSIONS=.pdf, .docx, .png
# Optional: Folder where to store the instructions, relative to the '/app/static/' folder # Optional: Folder where to store the instructions, relative to '/app/' folder
# Default: instructions # Default: data/instructions
# BK_INSTRUCTIONS_FOLDER=/var/lib/bricktracker/instructions/ # BK_INSTRUCTIONS_FOLDER=data/instructions
# Optional: Hide the 'Add' entry from the menu. Does not disable the route. # Optional: Hide the 'Add' entry from the menu. Does not disable the route.
# Default: false # Default: false
@@ -97,6 +122,14 @@
# Default: false # Default: false
# BK_HIDE_ADMIN=true # BK_HIDE_ADMIN=true
# Optional: Admin sections to expand by default (comma-separated list)
# Valid sections: authentication, instructions, image, theme, retired, metadata, owner, purchase_location, status, storage, tag, database
# Default: database (maintains original behavior with database section expanded)
# Examples:
# BK_ADMIN_DEFAULT_EXPANDED_SECTIONS=database,theme
# BK_ADMIN_DEFAULT_EXPANDED_SECTIONS=instructions,metadata
# BK_ADMIN_DEFAULT_EXPANDED_SECTIONS= (all sections collapsed)
# Optional: Hide the 'Instructions' entry from the menu. Does not disable the route. # Optional: Hide the 'Instructions' entry from the menu. Does not disable the route.
# Default: false # Default: false
# BK_HIDE_ALL_INSTRUCTIONS=true # BK_HIDE_ALL_INSTRUCTIONS=true
@@ -122,6 +155,10 @@
# Default: false # Default: false
# BK_HIDE_ALL_STORAGES=true # BK_HIDE_ALL_STORAGES=true
# Optional: Hide the 'Statistics' entry from the menu. Does not disable the route.
# Default: false
# BK_HIDE_STATISTICS=true
# Optional: Hide the 'Instructions' entry in a Set card # Optional: Hide the 'Instructions' entry in a Set card
# Default: false # Default: false
# BK_HIDE_SET_INSTRUCTIONS=true # BK_HIDE_SET_INSTRUCTIONS=true
@@ -134,21 +171,33 @@
# Default: false # Default: false
# BK_HIDE_TABLE_MISSING_PARTS=true # BK_HIDE_TABLE_MISSING_PARTS=true
# Optional: Hide the 'Checked' column from the parts table.
# Default: false
# BK_HIDE_TABLE_CHECKED_PARTS=true
# Optional: Hide the 'Wishlist' entry from the menu. Does not disable the route. # Optional: Hide the 'Wishlist' entry from the menu. Does not disable the route.
# Default: false # Default: false
# BK_HIDE_WISHES=true # BK_HIDE_WISHES=true
# Optional: Change the default order of minifigures. By default ordered by insertion order. # Optional: Change the default order of minifigures. By default ordered by insertion order.
# Useful column names for this option are: # Useful column names for this option are:
# - "rebrickable_minifigures"."figure": minifigure ID (fig-xxxxx) # - "rebrickable_minifigures"."figure": minifigure ID (e.g., "fig-001234")
# - "rebrickable_minifigures"."number": minifigure ID as an integer (xxxxx) # - "rebrickable_minifigures"."number": minifigure ID as an integer (e.g., 1234)
# - "rebrickable_minifigures"."name": minifigure name # - "rebrickable_minifigures"."name": minifigure name
# - "rebrickable_minifigures"."number_of_parts": number of parts in the minifigure
# - "bricktracker_minifigures"."quantity": quantity owned
# - "total_missing": number of missing parts (composite field)
# - "total_damaged": number of damaged parts (composite field)
# - "total_quantity": total quantity across all sets (composite field)
# - "total_sets": number of sets containing this minifigure (composite field)
# Default: "rebrickable_minifigures"."name" ASC # Default: "rebrickable_minifigures"."name" ASC
# BK_MINIFIGURES_DEFAULT_ORDER="rebrickable_minifigures"."name" ASC # Examples:
# BK_MINIFIGURES_DEFAULT_ORDER="rebrickable_minifigures"."number" DESC
# BK_MINIFIGURES_DEFAULT_ORDER="total_missing" DESC, "rebrickable_minifigures"."name" ASC
# Optional: Folder where to store the minifigures images, relative to the '/app/static/' folder # Optional: Folder where to store the minifigures images, relative to '/app/' folder
# Default: minifigs # Default: data/minifigures
# BK_MINIFIGURES_FOLDER=minifigures # BK_MINIFIGURES_FOLDER=data/minifigures
# Optional: Disable threading on the task executed by the socket. # Optional: Disable threading on the task executed by the socket.
# You should not need to change this parameter unless you are debugging something with the # You should not need to change this parameter unless you are debugging something with the
@@ -158,17 +207,67 @@
# Optional: Change the default order of parts. By default ordered by insertion order. # Optional: Change the default order of parts. By default ordered by insertion order.
# Useful column names for this option are: # Useful column names for this option are:
# - "bricktracker_parts"."part": part number # - "bricktracker_parts"."part": part number (e.g., "3001")
# - "bricktracker_parts"."spare": part is a spare part # - "bricktracker_parts"."spare": part is a spare part (0 or 1)
# - "bricktracker_parts"."quantity": quantity of this part
# - "bricktracker_parts"."missing": number of missing parts
# - "bricktracker_parts"."damaged": number of damaged parts
# - "rebrickable_parts"."name": part name # - "rebrickable_parts"."name": part name
# - "rebrickable_parts"."color_name": part color name # - "rebrickable_parts"."color_name": part color name
# - "total_missing": number of missing parts # - "total_missing": total missing across all sets (composite field)
# - "total_damaged": total damaged across all sets (composite field)
# - "total_quantity": total quantity across all sets (composite field)
# - "total_sets": number of sets containing this part (composite field)
# - "total_minifigures": number of minifigures with this part (composite field)
# Default: "rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "bricktracker_parts"."spare" ASC # Default: "rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "bricktracker_parts"."spare" ASC
# BK_PARTS_DEFAULT_ORDER="total_missing" DESC, "rebrickable_parts"."name"."name" ASC # Examples:
# BK_PARTS_DEFAULT_ORDER="total_missing" DESC, "rebrickable_parts"."name" ASC
# BK_PARTS_DEFAULT_ORDER="rebrickable_parts"."color_name" ASC, "rebrickable_parts"."name" ASC
# Optional: Folder where to store the parts images, relative to the '/app/static/' folder # Optional: Folder where to store the parts images, relative to '/app/' folder
# Default: parts # Default: data/parts
# BK_PARTS_FOLDER=parts # BK_PARTS_FOLDER=data/parts
# Optional: Enable server-side pagination for individual pages (recommended for large collections)
# When enabled, pages use server-side pagination with configurable page sizes
# When disabled, pages load all data at once with instant client-side search
# Default: false for all
# BK_SETS_SERVER_SIDE_PAGINATION=true
# BK_PARTS_SERVER_SIDE_PAGINATION=true
# BK_MINIFIGURES_SERVER_SIDE_PAGINATION=true
# BK_PROBLEMS_SERVER_SIDE_PAGINATION=true
# Optional: Number of parts to show per page on desktop devices (when server-side pagination is enabled)
# Default: 10
# BK_PARTS_PAGINATION_SIZE_DESKTOP=10
# Optional: Number of parts to show per page on mobile devices (when server-side pagination is enabled)
# Default: 5
# BK_PARTS_PAGINATION_SIZE_MOBILE=5
# Optional: Number of sets to show per page on desktop devices (when server-side pagination is enabled)
# Should be divisible by 4 for grid layout. Default: 12
# BK_SETS_PAGINATION_SIZE_DESKTOP=12
# Optional: Number of sets to show per page on mobile devices (when server-side pagination is enabled)
# Default: 4
# BK_SETS_PAGINATION_SIZE_MOBILE=4
# Optional: Number of minifigures to show per page on desktop devices (when server-side pagination is enabled)
# Default: 10
# BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP=10
# Optional: Number of minifigures to show per page on mobile devices (when server-side pagination is enabled)
# Default: 5
# BK_MINIFIGURES_PAGINATION_SIZE_MOBILE=5
# Optional: Number of problems to show per page on desktop devices (when server-side pagination is enabled)
# Default: 10
# BK_PROBLEMS_PAGINATION_SIZE_DESKTOP=10
# Optional: Number of problems to show per page on mobile devices (when server-side pagination is enabled)
# Default: 5
# BK_PROBLEMS_PAGINATION_SIZE_MOBILE=5
# Optional: Port the server will listen on. # Optional: Port the server will listen on.
# Default: 3333 # Default: 3333
@@ -185,9 +284,12 @@
# Optional: Change the default order of purchase locations. By default ordered by insertion order. # Optional: Change the default order of purchase locations. By default ordered by insertion order.
# Useful column names for this option are: # Useful column names for this option are:
# - "bricktracker_metadata_purchase_locations"."name" ASC: storage name # - "bricktracker_metadata_purchase_locations"."name": purchase location name
# - "bricktracker_metadata_purchase_locations"."rowid": insertion order (special column)
# Default: "bricktracker_metadata_purchase_locations"."name" ASC # Default: "bricktracker_metadata_purchase_locations"."name" ASC
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."name" ASC # Examples:
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."name" DESC
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."rowid" DESC
# Optional: Shuffle the lists on the front page. # Optional: Shuffle the lists on the front page.
# Default: false # Default: false
@@ -203,27 +305,54 @@
# Optional: URL of the image representing a missing image in Rebrickable # Optional: URL of the image representing a missing image in Rebrickable
# Default: https://rebrickable.com/static/img/nil.png # Default: https://rebrickable.com/static/img/nil.png
# BK_REBRICKABLE_IMAGE_NIL= # BK_REBRICKABLE_IMAGE_NIL=https://rebrickable.com/static/img/nil.png
# Optional: URL of the image representing a missing minifigure image in Rebrickable # Optional: URL of the image representing a missing minifigure image in Rebrickable
# Default: https://rebrickable.com/static/img/nil_mf.jpg # Default: https://rebrickable.com/static/img/nil_mf.jpg
# BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE= # BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE=https://rebrickable.com/static/img/nil_mf.jpg
# Optional: Pattern of the link to Rebrickable for a minifigure. Will be passed to Python .format() # Optional: Pattern of the link to Rebrickable for a minifigure. Will be passed to Python .format()
# Default: https://rebrickable.com/minifigs/{figure} # Default: https://rebrickable.com/minifigs/{figure}
# BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN= # BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN=https://rebrickable.com/minifigs/{figure}
# Optional: Pattern of the link to Rebrickable for a part. Will be passed to Python .format() # Optional: Pattern of the link to Rebrickable for a part. Will be passed to Python .format()
# Default: https://rebrickable.com/parts/{part}/_/{color} # Default: https://rebrickable.com/parts/{part}/_/{color}
# BK_REBRICKABLE_LINK_PART_PATTERN= # BK_REBRICKABLE_LINK_PART_PATTERN=https://rebrickable.com/parts/{part}/_/{color}
# Optional: Pattern of the link to Rebrickable for instructions. Will be passed to Python .format() # Optional: Pattern of the link to Rebrickable for instructions. Will be passed to Python .format()
# Default: https://rebrickable.com/instructions/{path} # Default: https://rebrickable.com/instructions/{path}
# BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN= # BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN=https://rebrickable.com/instructions/{path}
# Optional: User-Agent to use when querying Rebrickable outside of the Rebrick python library # Optional: User-Agent to use when querying Rebrickable and Peeron outside of the Rebrick python library
# Default: 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36' # Default: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
# BK_REBRICKABLE_USER_AGENT= # BK_USER_AGENT=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
# Legacy: User-Agent for Rebrickable (use BK_USER_AGENT instead)
# BK_REBRICKABLE_USER_AGENT=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
# Optional: Delay in milliseconds between Peeron page downloads to avoid being potentially blocked
# Default: 1000
# BK_PEERON_DOWNLOAD_DELAY=1000
# Optional: Minimum image size (width/height) for valid Peeron instruction pages
# Images smaller than this are considered error placeholders and will be rejected
# Default: 100
# BK_PEERON_MIN_IMAGE_SIZE=100
# Optional: Pattern for Peeron instruction page URLs. Will be passed to Python .format()
# Supports {set_number} and {version_number} parameters
# Default: http://peeron.com/scans/{set_number}-{version_number}
# BK_PEERON_INSTRUCTION_PATTERN=
# Optional: Pattern for Peeron thumbnail URLs. Will be passed to Python .format()
# Supports {set_number} and {version_number} parameters
# Default: http://belay.peeron.com/thumbs/{set_number}-{version_number}/
# BK_PEERON_THUMBNAIL_PATTERN=
# Optional: Pattern for Peeron scan URLs. Will be passed to Python .format()
# Supports {set_number} and {version_number} parameters
# Default: http://belay.peeron.com/scans/{set_number}-{version_number}/
# BK_PEERON_SCAN_PATTERN=
# Optional: Display Rebrickable links wherever applicable # Optional: Display Rebrickable links wherever applicable
# Default: false # Default: false
@@ -238,27 +367,39 @@
# Default: https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date # Default: https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date
# BK_RETIRED_SETS_FILE_URL= # BK_RETIRED_SETS_FILE_URL=
# Optional: Path to the unofficial retired sets lists # Optional: Path to the unofficial retired sets lists, relative to '/app/' folder
# You can name it whatever you want, but content has to be a CSV # You can name it whatever you want, but content has to be a CSV
# Default: ./retired_sets.csv # Default: data/retired_sets.csv
# BK_RETIRED_SETS_PATH=/var/lib/bricktracker/retired_sets.csv # BK_RETIRED_SETS_PATH=data/retired_sets.csv
# Optional: Change the default order of sets. By default ordered by insertion order. # Optional: Change the default order of sets. By default ordered by insertion order.
# Useful column names for this option are: # Useful column names for this option are:
# - "rebrickable_sets"."set": set number as a string # - "rebrickable_sets"."set": set number as a string (e.g., "10255-1")
# - "rebrickable_sets"."number": the number part of set as an integer # - "rebrickable_sets"."number": the number part of set as text (e.g., "10255")
# - "rebrickable_sets"."version": the version part of set as an integer # - "rebrickable_sets"."version": the version part of set as an integer (e.g., 1)
# - "rebrickable_sets"."name": set name # - "rebrickable_sets"."name": set name
# - "rebrickable_sets"."year": set release year # - "rebrickable_sets"."year": set release year
# - "rebrickable_sets"."number_of_parts": set number of parts # - "rebrickable_sets"."number_of_parts": set number of parts
# - "total_missing": number of missing parts # - "bricktracker_sets"."purchase_date": purchase date (as REAL/Julian day)
# - "total_minifigures": number of minifigures # - "bricktracker_sets"."purchase_price": purchase price
# - "total_missing": number of missing parts (composite field)
# - "total_damaged": number of damaged parts (composite field)
# - "total_minifigures": number of minifigures (composite field)
# Default: "rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC # Default: "rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC
# BK_SETS_DEFAULT_ORDER="rebrickable_sets"."year" ASC # Examples:
# BK_SETS_DEFAULT_ORDER="rebrickable_sets"."year" DESC, "rebrickable_sets"."name" ASC
# BK_SETS_DEFAULT_ORDER="rebrickable_sets"."number_of_parts" DESC
# BK_SETS_DEFAULT_ORDER="total_missing" DESC, "rebrickable_sets"."year" ASC
# Optional: Folder where to store the sets images, relative to the '/app/static/' folder # Optional: Folder where to store the sets images, relative to '/app/' folder
# Default: sets # Default: data/sets
# BK_SETS_FOLDER=sets # BK_SETS_FOLDER=data/sets
# Optional: Enable set consolidation/grouping on the main sets page
# When enabled, multiple copies of the same set are grouped together showing instance count
# When disabled, each set copy is displayed individually (original behavior)
# Default: false
# BK_SETS_CONSOLIDATION=true
# Optional: Make the grid filters displayed by default, rather than collapsed # Optional: Make the grid filters displayed by default, rather than collapsed
# Default: false # Default: false
@@ -268,10 +409,18 @@
# Default: false # Default: false
# BK_SHOW_GRID_SORT=true # BK_SHOW_GRID_SORT=true
# Optional: Skip saving or displaying spare parts # Optional: Show duplicate filter button on sets page
# Default: true
# BK_SHOW_SETS_DUPLICATE_FILTER=true
# Optional: Skip importing spare parts when downloading sets from Rebrickable
# Default: false # Default: false
# BK_SKIP_SPARE_PARTS=true # BK_SKIP_SPARE_PARTS=true
# Optional: Hide spare parts from parts lists (spare parts must still be in database)
# Default: false
# BK_HIDE_SPARE_PARTS=true
# Optional: Namespace of the Socket.IO socket # Optional: Namespace of the Socket.IO socket
# Default: bricksocket # Default: bricksocket
# BK_SOCKET_NAMESPACE=customsocket # BK_SOCKET_NAMESPACE=customsocket
@@ -282,18 +431,21 @@
# Optional: Change the default order of storages. By default ordered by insertion order. # Optional: Change the default order of storages. By default ordered by insertion order.
# Useful column names for this option are: # Useful column names for this option are:
# - "bricktracker_metadata_storages"."name" ASC: storage name # - "bricktracker_metadata_storages"."name": storage name
# - "bricktracker_metadata_storages"."rowid": insertion order (special column)
# Default: "bricktracker_metadata_storages"."name" ASC # Default: "bricktracker_metadata_storages"."name" ASC
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."name" ASC # Examples:
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."name" DESC
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."rowid" DESC
# Optional: URL to the themes.csv.gz on Rebrickable # Optional: URL to the themes.csv.gz on Rebrickable
# Default: https://cdn.rebrickable.com/media/downloads/themes.csv.gz # Default: https://cdn.rebrickable.com/media/downloads/themes.csv.gz
# BK_THEMES_FILE_URL= # BK_THEMES_FILE_URL=
# Optional: Path to the themes file # Optional: Path to the themes file, relative to '/app/' folder
# You can name it whatever you want, but content has to be a CSV # You can name it whatever you want, but content has to be a CSV
# Default: ./themes.csv # Default: data/themes.csv
# BK_THEMES_PATH=/var/lib/bricktracker/themes.csv # BK_THEMES_PATH=data/themes.csv
# Optional: Timezone to use to display datetimes # Optional: Timezone to use to display datetimes
# Check your system for available timezone/TZ values # Check your system for available timezone/TZ values
@@ -305,11 +457,30 @@
# Default: false # Default: false
# BK_USE_REMOTE_IMAGES=true # BK_USE_REMOTE_IMAGES=true
# Optional: Change the default order of sets. By default ordered by insertion order. # Optional: Change the default order of wishlist sets. By default ordered by insertion order.
# Useful column names for this option are: # Useful column names for this option are:
# - "bricktracker_wishes"."set": set number as a string # - "bricktracker_wishes"."set": set number as a string (e.g., "10255-1")
# - "bricktracker_wishes"."name": set name # - "bricktracker_wishes"."name": set name
# - "bricktracker_wishes"."year": set release year # - "bricktracker_wishes"."year": set release year
# - "bricktracker_wishes"."number_of_parts": set number of parts # - "bricktracker_wishes"."number_of_parts": set number of parts
# - "bricktracker_wishes"."theme_id": theme ID
# - "bricktracker_wishes"."rowid": insertion order (special column)
# Default: "bricktracker_wishes"."rowid" DESC # Default: "bricktracker_wishes"."rowid" DESC
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."set" DESC # Examples:
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."year" DESC, "bricktracker_wishes"."name" ASC
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."number_of_parts" DESC
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."set" ASC
# Optional: Show collection growth charts on the statistics page
# Default: true
# BK_STATISTICS_SHOW_CHARTS=false
# Optional: Default state of statistics page sections (expanded or collapsed)
# When true, all sections start expanded. When false, all sections start collapsed.
# Default: true
# BK_STATISTICS_DEFAULT_EXPANDED=false
# Optional: Enable dark mode by default
# When true, the application starts in dark mode.
# Default: false
# BK_DARK_MODE=true

2
.gitignore vendored
View File

@@ -17,6 +17,7 @@ static/sets/
# IDE # IDE
.vscode/ .vscode/
*.code-workspace
# Temporary # Temporary
*.csv *.csv
@@ -33,3 +34,4 @@ vitepress/
# Local data # Local data
offline/ offline/
data/

View File

@@ -1,8 +1,210 @@
# Changelog # Changelog
## Unreleased ## 1.3
### 1.2.4 ### Breaking Changes
#### Data Folder Consolidation
> **Warning**
> **BREAKING CHANGE**: Version 1.3 consolidates all user data into a single `data/` folder for easier backup and volume mapping.
- **Path handling**: All relative paths are now resolved relative to the application root (`/app` in Docker)
- Example: `data/app.db` → `/app/data/app.db`
- **New default paths** (automatically used for new installations):
- Database: `data/app.db` (was: `app.db` in root)
- Configuration: `data/.env` (was: `.env` in root) - *optional, backward compatible*
- CSV files: `data/*.csv` (was: `*.csv` in root)
- Images/PDFs: `data/{sets,parts,minifigures,instructions}/` (was: `static/*`)
- **Configuration file (.env) location**:
- New recommended location: `data/.env` (included in data volume, settings persist)
- Backward compatible: `.env` in root still works (requires volume mount for admin panel persistence)
- Priority: `data/.env` > `.env` (automatic detection, no migration required)
- **Migration options**:
1. **Migrate to new structure** (recommended - single volume for all data including .env)
2. **Keep current setup** (backward compatible - old paths continue to work)
See [Migration Guide](docs/migration_guide.md) for detailed instructions
#### Default Minifigures Folder Change
> **Warning**
> **BREAKING CHANGE**: Default minifigures folder path changed from `minifigs` to `minifigures`
- **Impact**: Users who relied on the default `BK_MINIFIGURES_FOLDER` value (without explicitly setting it) will need to either:
1. Set `BK_MINIFIGURES_FOLDER=minifigs` in their environment to maintain existing behavior, or
2. Rename their existing `minifigs` folder to `minifigures`
- **No impact**: Users who already have `BK_MINIFIGURES_FOLDER` explicitly configured
- Improved consistency across documentation and Docker configurations
### New Features
- **Live Settings changes**
- Added live environment variable configuration management system
- Configuration Management interface in admin panel with live preview and badge system
- **Live settings**: Can be changed without application restart (menu visibility, table display, pagination, features)
- **Static settings**: Require restart but can be edited and saved to .env file (authentication, server, database, API keys)
- Advanced badge system showing value status: True/False for booleans, Set/Default/Unset for other values, Changed indicator
- Live API endpoints: `/admin/api/config/update` for immediate changes, `/admin/api/config/update-static` for .env updates
- Form pre-population with current values and automatic page reload after successful live updates
- Fixed environment variable lock detection in admin configuration panel
- Resolved bug where all variables appeared "locked" after saving live settings
- Lock detection now correctly identifies only Docker environment variables set before .env loading
- Variables set via Docker's `environment:` directive remain properly locked
- Variables from data/.env or root .env are correctly shown as editable
- Added configuration persistence warning in admin panel
- Warning banner shows when using .env in root (non-persistent)
- Success banner shows when using data/.env (persistent)
- Provides migration instructions directly in the UI
- **Spare Parts**
- Added spare parts control options
- `BK_SKIP_SPARE_PARTS`: Skip importing spare parts when downloading sets from Rebrickable (parts not saved to database)
- `BK_HIDE_SPARE_PARTS`: Hide spare parts from all parts lists (parts must still be in database)
- Both options are live-changeable in admin configuration panel
- Options can be used independently or together for flexible spare parts management
- Affects all parts displays: /parts page, set details accordion, minifigure parts, and problem parts
- **Pagination**
- Added individual pagination control system per entity type
- `BK_SETS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for sets
- `BK_PARTS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for parts
- `BK_MINIFIGURES_SERVER_SIDE_PAGINATION`: Enable/disable pagination for minifigures
- Device-specific pagination sizes (desktop/mobile) for each entity type
- Supports search, filtering, and sorting in both server-side and client-side modes
- **Peeron Instructions**
- Added Peeron instructions integration
- Full image caching system with automatic thumbnail generation
- Optimized HTTP calls by downloading full images once and generating thumbnails locally
- Automatic cache cleanup after PDF generation to save disk space
- **Parts checkmark**
- Added parts checking/inventory system
- New "Checked" column in parts tables for tracking inventory progress
- Checkboxes to mark parts as verified during set walkthrough
- `BK_HIDE_TABLE_CHECKED_PARTS`: Environment variable to hide checked column
- **Set Consolidation**
- Added set consolidation/grouping functionality
- Automatic grouping of duplicate sets on main sets page
- Shows instance count with stack icon badge (e.g., "3 copies")
- Expandable drawer interface to view all set copies individually
- Full set cards for each instance with all badges, statuses, and functionality
- `BK_SETS_CONSOLIDATION`: Environment variable to enable/disable consolidation (default: false)
- Backwards compatible - when disabled, behaves exactly like original individual view
- Improved theme filtering: handles duplicate theme names correctly
- Fixed set number sorting: proper numeric sorting in both ascending and descending order
- Mixed status indicators for consolidated sets: three-state checkboxes (unchecked/partial/checked) with count badges
- Template logic handles three states: none (0/2), all (2/2), partial (1/2) with visual indicators
- Purple overlay styling for partial states, disabled checkboxes for read-only consolidated status display
- Individual sets maintain full interactive checkbox functionality
- **Statistics**
- Added comprehensive statistics system (#91)
- New Statistics page with collection analytics
- Financial overview: total cost, average price, price range, investment tracking
- Collection metrics: total sets, unique sets, parts count, minifigures count
- Theme distribution statistics with clickable drill-down to filtered sets
- Storage location statistics showing sets per location with value calculations
- Purchase location analytics with spending patterns and date ranges
- Problem tracking: missing and damaged parts statistics
- Clickable numbers throughout statistics that filter to relevant sets
- `BK_HIDE_STATISTICS`: Environment variable to hide statistics menu item
- Year-based analytics: Sets by release year and purchases by year
- Sets by Release Year: Shows collection distribution across LEGO release years
- Purchases by Year: Tracks spending patterns and acquisition timeline
- Year summary with peak collection/spending years and timeline insights
- Enhanced statistics interface and functionality
- Collapsible sections: All statistics sections have clickable headers to expand/collapse
- Collection growth charts: Line charts showing sets, parts, and minifigures over time
- Configuration options: `BK_STATISTICS_SHOW_CHARTS` and `BK_STATISTICS_DEFAULT_EXPANDED` environment variables
- **Admin Page Section Expansion**
- Added configurable admin page section expansion
- `BK_ADMIN_DEFAULT_EXPANDED_SECTIONS`: Environment variable to specify which sections expand by default
- Accepts comma-separated list of section names (e.g., "database,theme,instructions")
- Valid sections: authentication, instructions, image, theme, retired, metadata, owner, purchase_location, status, storage, tag, database
- URL parameters take priority over configuration (e.g., `?open_database=1`)
- Database section expanded by default to maintain original behavior
- Smart metadata handling: sub-section expansion automatically expands parent metadata section
- **Duplicate Sets Filter**
- Added duplicate sets filter functionality
- New filter button on Sets page to show only duplicate/consolidated sets
- `BK_SHOW_SETS_DUPLICATE_FILTER`: Environment variable to show/hide the filter button (default: true)
- Works with both server-side and client-side pagination modes
- Consolidated mode: Shows sets that have multiple instances
- Non-consolidated mode: Shows sets that appear multiple times in collection
- **BrickLink Links**
- Added BrickLink links for sets
- BrickLink badge links now appear on set cards and set details pages alongside Rebrickable links
- `BK_BRICKLINK_LINK_SET_PATTERN`: New environment variable for BrickLink set URL pattern (default: https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num})
- Controlled by existing `BK_BRICKLINK_LINKS` environment variable
- **Dark Mode**
- Added dark mode support
- `BK_DARK_MODE`: Environment variable to enable dark mode theme (default: false)
- Uses Bootstrap 5.3's native dark mode with `data-bs-theme` attribute
- Live-changeable via Admin > Live Settings
- Setting persists across sessions via .env file
- **Alphanumeric Set Number**
- Added alphanumeric set number support
- Database schema change: Set number column changed from INTEGER to TEXT
- Supports LEGO promotional and special edition sets with letters in their numbers
- Examples: "McDR6US-1", "COMCON035-1", "EG00021-1"
### Improvements
- Improved WebSocket/Socket.IO reliability for mobile devices
- Changed connection strategy to polling-first with automatic WebSocket upgrade
- Increased connection timeout to 30 seconds for slow mobile networks
- Added ping/pong keepalive settings (30s timeout, 25s interval)
- Improved server-side connection logging with user agent and transport details
- Fixed dynamic sort icons across all pages
- Sort icons now properly toggle between ascending/descending states
- Improved DataTable integration
- Disabled column header sorting when server-side pagination is enabled
- Prevents conflicting sort mechanisms between DataTable and server-side sorting
- Enhanced color dropdown functionality
- Automatic merging of duplicate color entries with same color_id
- Keeps entries with valid RGB data, removes entries with None/empty RGB
- Preserves selection state during dropdown consolidation
- Consistent search behavior (instant for client-side, Enter key for server-side)
- Mobile-friendly pagination navigation
- Added performance optimization
- SQLite WAL Mode:
- Increased cache size to 10,000 pages (~40MB) for faster query execution
- Set temp_store to memory for accelerated temporary operations
- Enabled foreign key constraints and optimized synchronous mode
- Added ANALYZE for improved query planning and statistics
- Database Indexes (Migration 0019):
- High-impact composite index for problem parts aggregation (`idx_bricktracker_parts_id_missing_damaged`)
- Parts lookup optimization (`idx_bricktracker_parts_part_color_spare`)
- Set storage filtering (`idx_bricktracker_sets_set_storage`)
- Search optimization with case-insensitive indexes (`idx_rebrickable_sets_name_lower`, `idx_rebrickable_parts_name_lower`)
- Year and theme filtering optimization (`idx_rebrickable_sets_year`, `idx_rebrickable_sets_theme_id`)
- Additional indexes for purchase dates, quantities, sorting, and minifigures aggregation
- Statistics Query Optimization:
- Replaced separate subqueries with efficient CTEs (Common Table Expressions)
- Consolidated aggregations for set, part, minifigure, and financial statistics
- Added default image handling for sets without images
- Sets with null/missing images from Rebrickable API now display placeholder image
- Automatic fallback to nil.png from parts folder for set previews
- Copy of nil placeholder saved as set image for consistent display across all routes
- Prevents errors when downloading sets that have no set_img_url in API response
- Fixed instructions download from Rebrickable
- Replaced cloudscraper with standard requests library
- Resolves 403 Forbidden errors when downloading instruction PDFs
- Fixed instructions display and URL generation
- Fixed "Open PDF" button links to use correct data route
- Corrected path resolution for data/instructions folder
- Fixed instruction listing page to scan correct folder location
- Fixed Peeron PDF creation to use correct data folder path
- Fixed foreign key constraint error when adding sets
- Rebrickable set is now inserted before BrickTracker set to satisfy FK constraints
- Resolves "FOREIGN KEY constraint failed" error when adding sets
- Fixed atomic transaction handling for set downloads
- All database operations during set addition now use deferred execution
- Ensures all-or-nothing behavior: if any part fails (set info, parts, minifigs), nothing is committed
- Prevents partial set additions that would leave the database in an inconsistent state
- Metadata updates (owners, tags) now defer until final commit
## 1.2.4
> **Warning** > **Warning**
> To use the new BrickLink color parameter in URLs, update your `.env` file: > To use the new BrickLink color parameter in URLs, update your `.env` file:

View File

@@ -2,13 +2,16 @@ FROM python:3-slim
WORKDIR /app WORKDIR /app
# Copy requirements first (so pip install can be cached)
COPY requirements.txt .
# Python library requirements
RUN pip install --no-cache-dir -r requirements.txt
# Bricktracker # Bricktracker
COPY . . COPY . .
# Fix line endings and set executable permissions for entrypoint script # Set executable permissions for entrypoint script
RUN sed -i 's/\r$//' entrypoint.sh && chmod +x entrypoint.sh RUN chmod +x entrypoint.sh
# Python library requirements
RUN pip --no-cache-dir install -r requirements.txt
ENTRYPOINT ["./entrypoint.sh"] ENTRYPOINT ["./entrypoint.sh"]

View File

@@ -1,3 +1,5 @@
<img src="static/brick.png" height="100" width="100">
# BrickTracker # BrickTracker
A web application for organizing and tracking LEGO sets, parts, and minifigures. Uses the Rebrickable API to fetch LEGO data and allows users to track missing pieces and collection status. A web application for organizing and tracking LEGO sets, parts, and minifigures. Uses the Rebrickable API to fetch LEGO data and allows users to track missing pieces and collection status.
@@ -18,17 +20,13 @@ A web application for organizing and tracking LEGO sets, parts, and minifigures.
Use the provided [compose.yaml](compose.yaml) file. Use the provided [compose.yaml](compose.yaml) file.
See [Quickstart](docs/quickstart.md) to get up and running right away. See [Quick Start](https://bricktracker.baerentsen.space/quick-start) to get up and running right away.
See [Setup](docs/setup.md) for a more setup guide. See [Walk Through](https://bricktracker.baerentsen.space/tutorial-first-steps) for a more detailed guide.
## Usage
See [first steps](docs/first-steps.md).
## Documentation ## Documentation
Most of the pages should be self explanatory to use. Most of the pages should be self explanatory to use.
However, you can find more specific documentation in the [documentation](docs/DOCS.md). However, you can find more specific documentation in the [documentation](https://bricktracker.baerentsen.space/whatis).
You can find screenshots of the application in the [overview](docs/overview.md) documentation file. You can find screenshots of the application in the [overview](https://bricktracker.baerentsen.space/overview) documentation.

View File

@@ -1,6 +1,8 @@
import logging import logging
import os
import sys import sys
import time import time
from pathlib import Path
from zoneinfo import ZoneInfo from zoneinfo import ZoneInfo
from flask import current_app, Flask, g from flask import current_app, Flask, g
@@ -10,6 +12,7 @@ from bricktracker.configuration_list import BrickConfigurationList
from bricktracker.login import LoginManager from bricktracker.login import LoginManager
from bricktracker.navbar import Navbar from bricktracker.navbar import Navbar
from bricktracker.sql import close from bricktracker.sql import close
from bricktracker.template_filters import replace_query_filter
from bricktracker.version import __version__ from bricktracker.version import __version__
from bricktracker.views.add import add_page from bricktracker.views.add import add_page
from bricktracker.views.admin.admin import admin_page from bricktracker.views.admin.admin import admin_page
@@ -24,6 +27,7 @@ from bricktracker.views.admin.status import admin_status_page
from bricktracker.views.admin.storage import admin_storage_page from bricktracker.views.admin.storage import admin_storage_page
from bricktracker.views.admin.tag import admin_tag_page from bricktracker.views.admin.tag import admin_tag_page
from bricktracker.views.admin.theme import admin_theme_page from bricktracker.views.admin.theme import admin_theme_page
from bricktracker.views.data import data_page
from bricktracker.views.error import error_404 from bricktracker.views.error import error_404
from bricktracker.views.index import index_page from bricktracker.views.index import index_page
from bricktracker.views.instructions import instructions_page from bricktracker.views.instructions import instructions_page
@@ -31,11 +35,65 @@ from bricktracker.views.login import login_page
from bricktracker.views.minifigure import minifigure_page from bricktracker.views.minifigure import minifigure_page
from bricktracker.views.part import part_page from bricktracker.views.part import part_page
from bricktracker.views.set import set_page from bricktracker.views.set import set_page
from bricktracker.views.statistics import statistics_page
from bricktracker.views.storage import storage_page from bricktracker.views.storage import storage_page
from bricktracker.views.wish import wish_page from bricktracker.views.wish import wish_page
def load_env_file() -> None:
    """Load a .env file into os.environ, with priority: data/.env > .env (root).

    Also records which BK_ variables were already present in the process
    environment before any .env file was read (i.e. set via Docker's
    ``environment:`` directive), so the admin panel can mark them as locked.

    The snapshot is taken only once: recomputing it on a later call would
    wrongly record .env-sourced variables as Docker-provided.
    """
    import json

    data_env = Path('data/.env')
    root_env = Path('.env')

    # Snapshot the BK_ variables that were set BEFORE loading any .env file.
    # These are "locked" (set via Docker's environment: directive).
    if '_BK_DOCKER_ENV_VARS' not in os.environ:
        docker_env_vars = [k for k in os.environ if k.startswith('BK_')]
        os.environ['_BK_DOCKER_ENV_VARS'] = json.dumps(docker_env_vars)

    env_file = None
    if data_env.exists():
        env_file = data_env
        logging.info(f"Loading environment from: {data_env}")
    elif root_env.exists():
        env_file = root_env
        logging.info(f"Loading environment from: {root_env} (consider migrating to data/.env)")

    if env_file:
        # Simple .env parser (no external dependencies needed)
        with open(env_file, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()

                # Skip comments and empty lines
                if not line or line.startswith('#'):
                    continue

                # Parse key=value
                if '=' in line:
                    key, value = line.split('=', 1)
                    key = key.strip()
                    value = value.strip()

                    # Remove surrounding quotes if present
                    if value.startswith('"') and value.endswith('"'):
                        value = value[1:-1]
                    elif value.startswith("'") and value.endswith("'"):
                        value = value[1:-1]

                    # Real environment variables take precedence over the file
                    if key not in os.environ:
                        os.environ[key] = value
def setup_app(app: Flask) -> None: def setup_app(app: Flask) -> None:
# Load .env file before configuration (if not already loaded by Docker Compose)
load_env_file()
# Load the configuration # Load the configuration
BrickConfigurationList(app) BrickConfigurationList(app)
@@ -59,7 +117,8 @@ def setup_app(app: Flask) -> None:
# Setup the login manager # Setup the login manager
LoginManager(app) LoginManager(app)
# I don't know :-) # Configure proxy header handling for reverse proxy deployments (nginx, Apache, etc.)
# This ensures proper client IP detection and HTTPS scheme recognition
app.wsgi_app = ProxyFix( app.wsgi_app = ProxyFix(
app.wsgi_app, app.wsgi_app,
x_for=1, x_for=1,
@@ -74,12 +133,14 @@ def setup_app(app: Flask) -> None:
# Register app routes # Register app routes
app.register_blueprint(add_page) app.register_blueprint(add_page)
app.register_blueprint(data_page)
app.register_blueprint(index_page) app.register_blueprint(index_page)
app.register_blueprint(instructions_page) app.register_blueprint(instructions_page)
app.register_blueprint(login_page) app.register_blueprint(login_page)
app.register_blueprint(minifigure_page) app.register_blueprint(minifigure_page)
app.register_blueprint(part_page) app.register_blueprint(part_page)
app.register_blueprint(set_page) app.register_blueprint(set_page)
app.register_blueprint(statistics_page)
app.register_blueprint(storage_page) app.register_blueprint(storage_page)
app.register_blueprint(wish_page) app.register_blueprint(wish_page)
@@ -121,6 +182,9 @@ def setup_app(app: Flask) -> None:
# Version # Version
g.version = __version__ g.version = __version__
# Register custom Jinja2 filters
app.jinja_env.filters['replace_query'] = replace_query_filter
# Make sure all connections are closed at the end # Make sure all connections are closed at the end
@app.teardown_request @app.teardown_request
def teardown_request(_: BaseException | None) -> None: def teardown_request(_: BaseException | None) -> None:

View File

@@ -11,8 +11,9 @@ CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'AUTHENTICATION_PASSWORD', 'd': ''}, {'n': 'AUTHENTICATION_PASSWORD', 'd': ''},
{'n': 'AUTHENTICATION_KEY', 'd': ''}, {'n': 'AUTHENTICATION_KEY', 'd': ''},
{'n': 'BRICKLINK_LINK_PART_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}'}, # noqa: E501 {'n': 'BRICKLINK_LINK_PART_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}'}, # noqa: E501
{'n': 'BRICKLINK_LINK_SET_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num}'}, # noqa: E501
{'n': 'BRICKLINK_LINKS', 'c': bool}, {'n': 'BRICKLINK_LINKS', 'c': bool},
{'n': 'DATABASE_PATH', 'd': './app.db'}, {'n': 'DATABASE_PATH', 'd': 'data/app.db'},
{'n': 'DATABASE_TIMESTAMP_FORMAT', 'd': '%Y-%m-%d-%H-%M-%S'}, {'n': 'DATABASE_TIMESTAMP_FORMAT', 'd': '%Y-%m-%d-%H-%M-%S'},
{'n': 'DEBUG', 'c': bool}, {'n': 'DEBUG', 'c': bool},
{'n': 'DEFAULT_TABLE_PER_PAGE', 'd': 25, 'c': int}, {'n': 'DEFAULT_TABLE_PER_PAGE', 'd': 25, 'c': int},
@@ -21,25 +22,40 @@ CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'HOST', 'd': '0.0.0.0'}, {'n': 'HOST', 'd': '0.0.0.0'},
{'n': 'INDEPENDENT_ACCORDIONS', 'c': bool}, {'n': 'INDEPENDENT_ACCORDIONS', 'c': bool},
{'n': 'INSTRUCTIONS_ALLOWED_EXTENSIONS', 'd': ['.pdf'], 'c': list}, # noqa: E501 {'n': 'INSTRUCTIONS_ALLOWED_EXTENSIONS', 'd': ['.pdf'], 'c': list}, # noqa: E501
{'n': 'INSTRUCTIONS_FOLDER', 'd': 'instructions', 's': True}, {'n': 'INSTRUCTIONS_FOLDER', 'd': 'data/instructions'},
{'n': 'HIDE_ADD_SET', 'c': bool}, {'n': 'HIDE_ADD_SET', 'c': bool},
{'n': 'HIDE_ADD_BULK_SET', 'c': bool}, {'n': 'HIDE_ADD_BULK_SET', 'c': bool},
{'n': 'HIDE_ADMIN', 'c': bool}, {'n': 'HIDE_ADMIN', 'c': bool},
{'n': 'ADMIN_DEFAULT_EXPANDED_SECTIONS', 'd': ['database'], 'c': list},
{'n': 'HIDE_ALL_INSTRUCTIONS', 'c': bool}, {'n': 'HIDE_ALL_INSTRUCTIONS', 'c': bool},
{'n': 'HIDE_ALL_MINIFIGURES', 'c': bool}, {'n': 'HIDE_ALL_MINIFIGURES', 'c': bool},
{'n': 'HIDE_ALL_PARTS', 'c': bool}, {'n': 'HIDE_ALL_PARTS', 'c': bool},
{'n': 'HIDE_ALL_PROBLEMS_PARTS', 'e': 'BK_HIDE_MISSING_PARTS', 'c': bool}, {'n': 'HIDE_ALL_PROBLEMS_PARTS', 'e': 'BK_HIDE_MISSING_PARTS', 'c': bool},
{'n': 'HIDE_ALL_SETS', 'c': bool}, {'n': 'HIDE_ALL_SETS', 'c': bool},
{'n': 'HIDE_ALL_STORAGES', 'c': bool}, {'n': 'HIDE_ALL_STORAGES', 'c': bool},
{'n': 'HIDE_STATISTICS', 'c': bool},
{'n': 'HIDE_SET_INSTRUCTIONS', 'c': bool}, {'n': 'HIDE_SET_INSTRUCTIONS', 'c': bool},
{'n': 'HIDE_TABLE_DAMAGED_PARTS', 'c': bool}, {'n': 'HIDE_TABLE_DAMAGED_PARTS', 'c': bool},
{'n': 'HIDE_TABLE_MISSING_PARTS', 'c': bool}, {'n': 'HIDE_TABLE_MISSING_PARTS', 'c': bool},
{'n': 'HIDE_TABLE_CHECKED_PARTS', 'c': bool},
{'n': 'HIDE_WISHES', 'c': bool}, {'n': 'HIDE_WISHES', 'c': bool},
{'n': 'MINIFIGURES_DEFAULT_ORDER', 'd': '"rebrickable_minifigures"."name" ASC'}, # noqa: E501 {'n': 'MINIFIGURES_DEFAULT_ORDER', 'd': '"rebrickable_minifigures"."name" ASC'}, # noqa: E501
{'n': 'MINIFIGURES_FOLDER', 'd': 'minifigs', 's': True}, {'n': 'MINIFIGURES_FOLDER', 'd': 'data/minifigures'},
{'n': 'MINIFIGURES_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
{'n': 'MINIFIGURES_PAGINATION_SIZE_MOBILE', 'd': 5, 'c': int},
{'n': 'MINIFIGURES_SERVER_SIDE_PAGINATION', 'c': bool},
{'n': 'NO_THREADED_SOCKET', 'c': bool}, {'n': 'NO_THREADED_SOCKET', 'c': bool},
{'n': 'PARTS_SERVER_SIDE_PAGINATION', 'c': bool},
{'n': 'SETS_SERVER_SIDE_PAGINATION', 'c': bool},
{'n': 'PARTS_DEFAULT_ORDER', 'd': '"rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "bricktracker_parts"."spare" ASC'}, # noqa: E501 {'n': 'PARTS_DEFAULT_ORDER', 'd': '"rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "bricktracker_parts"."spare" ASC'}, # noqa: E501
{'n': 'PARTS_FOLDER', 'd': 'parts', 's': True}, {'n': 'PARTS_FOLDER', 'd': 'data/parts'},
{'n': 'PARTS_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
{'n': 'PARTS_PAGINATION_SIZE_MOBILE', 'd': 5, 'c': int},
{'n': 'PROBLEMS_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
{'n': 'PROBLEMS_PAGINATION_SIZE_MOBILE', 'd': 10, 'c': int},
{'n': 'PROBLEMS_SERVER_SIDE_PAGINATION', 'c': bool},
{'n': 'SETS_PAGINATION_SIZE_DESKTOP', 'd': 12, 'c': int},
{'n': 'SETS_PAGINATION_SIZE_MOBILE', 'd': 4, 'c': int},
{'n': 'PORT', 'd': 3333, 'c': int}, {'n': 'PORT', 'd': 3333, 'c': int},
{'n': 'PURCHASE_DATE_FORMAT', 'd': '%d/%m/%Y'}, {'n': 'PURCHASE_DATE_FORMAT', 'd': '%d/%m/%Y'},
{'n': 'PURCHASE_CURRENCY', 'd': ''}, {'n': 'PURCHASE_CURRENCY', 'd': ''},
@@ -52,21 +68,33 @@ CONFIG: Final[list[dict[str, Any]]] = [
{'n': 'REBRICKABLE_LINK_PART_PATTERN', 'd': 'https://rebrickable.com/parts/{part}/_/{color}'}, # noqa: E501 {'n': 'REBRICKABLE_LINK_PART_PATTERN', 'd': 'https://rebrickable.com/parts/{part}/_/{color}'}, # noqa: E501
{'n': 'REBRICKABLE_LINK_INSTRUCTIONS_PATTERN', 'd': 'https://rebrickable.com/instructions/{path}'}, # noqa: E501 {'n': 'REBRICKABLE_LINK_INSTRUCTIONS_PATTERN', 'd': 'https://rebrickable.com/instructions/{path}'}, # noqa: E501
{'n': 'REBRICKABLE_USER_AGENT', 'd': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}, # noqa: E501 {'n': 'REBRICKABLE_USER_AGENT', 'd': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}, # noqa: E501
{'n': 'USER_AGENT', 'd': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}, # noqa: E501
{'n': 'PEERON_DOWNLOAD_DELAY', 'd': 1000, 'c': int},
{'n': 'PEERON_INSTRUCTION_PATTERN', 'd': 'http://peeron.com/scans/{set_number}-{version_number}'},
{'n': 'PEERON_MIN_IMAGE_SIZE', 'd': 100, 'c': int},
{'n': 'PEERON_SCAN_PATTERN', 'd': 'http://belay.peeron.com/scans/{set_number}-{version_number}/'},
{'n': 'PEERON_THUMBNAIL_PATTERN', 'd': 'http://belay.peeron.com/thumbs/{set_number}-{version_number}/'},
{'n': 'REBRICKABLE_LINKS', 'e': 'LINKS', 'c': bool}, {'n': 'REBRICKABLE_LINKS', 'e': 'LINKS', 'c': bool},
{'n': 'REBRICKABLE_PAGE_SIZE', 'd': 100, 'c': int}, {'n': 'REBRICKABLE_PAGE_SIZE', 'd': 100, 'c': int},
{'n': 'RETIRED_SETS_FILE_URL', 'd': 'https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date'}, # noqa: E501 {'n': 'RETIRED_SETS_FILE_URL', 'd': 'https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date'}, # noqa: E501
{'n': 'RETIRED_SETS_PATH', 'd': './retired_sets.csv'}, {'n': 'RETIRED_SETS_PATH', 'd': 'data/retired_sets.csv'},
{'n': 'SETS_DEFAULT_ORDER', 'd': '"rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC'}, # noqa: E501 {'n': 'SETS_DEFAULT_ORDER', 'd': '"rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC'}, # noqa: E501
{'n': 'SETS_FOLDER', 'd': 'sets', 's': True}, {'n': 'SETS_FOLDER', 'd': 'data/sets'},
{'n': 'SETS_CONSOLIDATION', 'd': False, 'c': bool},
{'n': 'SHOW_GRID_FILTERS', 'c': bool}, {'n': 'SHOW_GRID_FILTERS', 'c': bool},
{'n': 'SHOW_GRID_SORT', 'c': bool}, {'n': 'SHOW_GRID_SORT', 'c': bool},
{'n': 'SHOW_SETS_DUPLICATE_FILTER', 'd': True, 'c': bool},
{'n': 'SKIP_SPARE_PARTS', 'c': bool}, {'n': 'SKIP_SPARE_PARTS', 'c': bool},
{'n': 'HIDE_SPARE_PARTS', 'c': bool},
{'n': 'SOCKET_NAMESPACE', 'd': 'bricksocket'}, {'n': 'SOCKET_NAMESPACE', 'd': 'bricksocket'},
{'n': 'SOCKET_PATH', 'd': '/bricksocket/'}, {'n': 'SOCKET_PATH', 'd': '/bricksocket/'},
{'n': 'STORAGE_DEFAULT_ORDER', 'd': '"bricktracker_metadata_storages"."name" ASC'}, # noqa: E501 {'n': 'STORAGE_DEFAULT_ORDER', 'd': '"bricktracker_metadata_storages"."name" ASC'}, # noqa: E501
{'n': 'THEMES_FILE_URL', 'd': 'https://cdn.rebrickable.com/media/downloads/themes.csv.gz'}, # noqa: E501 {'n': 'THEMES_FILE_URL', 'd': 'https://cdn.rebrickable.com/media/downloads/themes.csv.gz'}, # noqa: E501
{'n': 'THEMES_PATH', 'd': './themes.csv'}, {'n': 'THEMES_PATH', 'd': 'data/themes.csv'},
{'n': 'TIMEZONE', 'd': 'Etc/UTC'}, {'n': 'TIMEZONE', 'd': 'Etc/UTC'},
{'n': 'USE_REMOTE_IMAGES', 'c': bool}, {'n': 'USE_REMOTE_IMAGES', 'c': bool},
{'n': 'WISHES_DEFAULT_ORDER', 'd': '"bricktracker_wishes"."rowid" DESC'}, {'n': 'WISHES_DEFAULT_ORDER', 'd': '"bricktracker_wishes"."rowid" DESC'},
{'n': 'STATISTICS_SHOW_CHARTS', 'd': True, 'c': bool},
{'n': 'STATISTICS_DEFAULT_EXPANDED', 'd': True, 'c': bool},
{'n': 'DARK_MODE', 'c': bool},
] ]

View File

@@ -0,0 +1,331 @@
import os
import logging
from typing import Any, Dict, Final, List, Optional
from pathlib import Path
from flask import current_app
logger = logging.getLogger(__name__)
# Environment variables that can be changed live without restart.
# Updates to these are applied immediately to Flask's config and persisted
# to the managed .env file by ConfigManager.update_config().
LIVE_CHANGEABLE_VARS: Final[List[str]] = [
    'BK_BRICKLINK_LINKS',
    'BK_DEFAULT_TABLE_PER_PAGE',
    'BK_INDEPENDENT_ACCORDIONS',
    'BK_HIDE_ADD_SET',
    'BK_HIDE_ADD_BULK_SET',
    'BK_HIDE_ADMIN',
    'BK_ADMIN_DEFAULT_EXPANDED_SECTIONS',
    'BK_HIDE_ALL_INSTRUCTIONS',
    'BK_HIDE_ALL_MINIFIGURES',
    'BK_HIDE_ALL_PARTS',
    'BK_HIDE_ALL_PROBLEMS_PARTS',
    'BK_HIDE_ALL_SETS',
    'BK_HIDE_ALL_STORAGES',
    'BK_HIDE_STATISTICS',
    'BK_HIDE_SET_INSTRUCTIONS',
    'BK_HIDE_TABLE_DAMAGED_PARTS',
    'BK_HIDE_TABLE_MISSING_PARTS',
    'BK_HIDE_TABLE_CHECKED_PARTS',
    'BK_HIDE_WISHES',
    'BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP',
    'BK_MINIFIGURES_PAGINATION_SIZE_MOBILE',
    'BK_MINIFIGURES_SERVER_SIDE_PAGINATION',
    'BK_PARTS_PAGINATION_SIZE_DESKTOP',
    'BK_PARTS_PAGINATION_SIZE_MOBILE',
    'BK_PARTS_SERVER_SIDE_PAGINATION',
    'BK_SETS_SERVER_SIDE_PAGINATION',
    'BK_PROBLEMS_PAGINATION_SIZE_DESKTOP',
    'BK_PROBLEMS_PAGINATION_SIZE_MOBILE',
    'BK_PROBLEMS_SERVER_SIDE_PAGINATION',
    'BK_SETS_PAGINATION_SIZE_DESKTOP',
    'BK_SETS_PAGINATION_SIZE_MOBILE',
    'BK_SETS_CONSOLIDATION',
    'BK_RANDOM',
    'BK_REBRICKABLE_LINKS',
    'BK_SHOW_GRID_FILTERS',
    'BK_SHOW_GRID_SORT',
    'BK_SHOW_SETS_DUPLICATE_FILTER',
    'BK_SKIP_SPARE_PARTS',
    'BK_HIDE_SPARE_PARTS',
    'BK_USE_REMOTE_IMAGES',
    'BK_PEERON_DOWNLOAD_DELAY',
    'BK_PEERON_MIN_IMAGE_SIZE',
    'BK_REBRICKABLE_PAGE_SIZE',
    'BK_STATISTICS_SHOW_CHARTS',
    'BK_STATISTICS_DEFAULT_EXPANDED',
    'BK_DARK_MODE',
    # Default ordering and formatting
    'BK_INSTRUCTIONS_ALLOWED_EXTENSIONS',
    'BK_MINIFIGURES_DEFAULT_ORDER',
    'BK_PARTS_DEFAULT_ORDER',
    'BK_SETS_DEFAULT_ORDER',
    'BK_PURCHASE_LOCATION_DEFAULT_ORDER',
    'BK_STORAGE_DEFAULT_ORDER',
    'BK_WISHES_DEFAULT_ORDER',
    # URL and Pattern Variables
    'BK_BRICKLINK_LINK_PART_PATTERN',
    'BK_BRICKLINK_LINK_SET_PATTERN',
    'BK_REBRICKABLE_IMAGE_NIL',
    'BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE',
    'BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN',
    'BK_REBRICKABLE_LINK_PART_PATTERN',
    'BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN',
    'BK_PEERON_INSTRUCTION_PATTERN',
    'BK_PEERON_SCAN_PATTERN',
    'BK_PEERON_THUMBNAIL_PATTERN',
    'BK_RETIRED_SETS_FILE_URL',
    'BK_RETIRED_SETS_PATH',
    'BK_THEMES_FILE_URL',
    'BK_THEMES_PATH'
]
# Environment variables that require an application restart to take effect.
# ConfigManager.update_config() rejects updates to these; they are only
# displayed read-only in the admin panel.
RESTART_REQUIRED_VARS: Final[List[str]] = [
    'BK_AUTHENTICATION_PASSWORD',
    'BK_AUTHENTICATION_KEY',
    'BK_DATABASE_PATH',
    'BK_DEBUG',
    'BK_DOMAIN_NAME',
    'BK_HOST',
    'BK_PORT',
    'BK_SOCKET_NAMESPACE',
    'BK_SOCKET_PATH',
    'BK_NO_THREADED_SOCKET',
    'BK_TIMEZONE',
    'BK_REBRICKABLE_API_KEY',
    'BK_INSTRUCTIONS_FOLDER',
    'BK_PARTS_FOLDER',
    'BK_SETS_FOLDER',
    'BK_MINIFIGURES_FOLDER',
    'BK_DATABASE_TIMESTAMP_FORMAT',
    'BK_FILE_DATETIME_FORMAT',
    'BK_PURCHASE_DATE_FORMAT',
    'BK_PURCHASE_CURRENCY',
    'BK_REBRICKABLE_USER_AGENT',
    'BK_USER_AGENT'
]
class ConfigManager:
"""Manages live configuration updates for BrickTracker"""
def __init__(self):
# Check for .env in data folder first (v1.3+), fallback to root (backward compatibility)
data_env = Path('data/.env')
root_env = Path('.env')
if data_env.exists():
self.env_file_path = data_env
logger.info("Using configuration file: data/.env")
elif root_env.exists():
self.env_file_path = root_env
logger.info("Using configuration file: .env (consider migrating to data/.env)")
else:
# Default to data/.env for new installations
self.env_file_path = data_env
logger.info("Configuration file will be created at: data/.env")
def get_current_config(self) -> Dict[str, Any]:
"""Get current configuration values for live-changeable variables"""
config = {}
for var in LIVE_CHANGEABLE_VARS:
# Get internal config name
internal_name = var.replace('BK_', '')
# Get current value from Flask config
if internal_name in current_app.config:
config[var] = current_app.config[internal_name]
else:
# Fallback to environment variable
config[var] = os.environ.get(var, '')
return config
def get_restart_required_config(self) -> Dict[str, Any]:
"""Get current configuration values for restart-required variables"""
config = {}
for var in RESTART_REQUIRED_VARS:
# Get internal config name
internal_name = var.replace('BK_', '')
# Get current value from Flask config
if internal_name in current_app.config:
config[var] = current_app.config[internal_name]
else:
# Fallback to environment variable
config[var] = os.environ.get(var, '')
return config
def update_config(self, updates: Dict[str, Any]) -> Dict[str, str]:
"""Update configuration values. Returns dict with status for each update"""
results = {}
for var_name, new_value in updates.items():
if var_name not in LIVE_CHANGEABLE_VARS:
results[var_name] = f"Error: {var_name} requires restart to change"
continue
try:
# Update environment variable
os.environ[var_name] = str(new_value)
# Update Flask config
internal_name = var_name.replace('BK_', '')
cast_value = self._cast_value(var_name, new_value)
current_app.config[internal_name] = cast_value
# Update .env file
self._update_env_file(var_name, new_value)
results[var_name] = "Updated successfully"
if current_app.debug:
logger.info(f"Config updated: {var_name}={new_value}")
except Exception as e:
results[var_name] = f"Error: {str(e)}"
logger.error(f"Failed to update {var_name}: {e}")
return results
def _cast_value(self, var_name: str, value: Any) -> Any:
"""Cast value to appropriate type based on variable name"""
# List variables (admin sections) - Check this FIRST before boolean check
if 'sections' in var_name.lower():
if isinstance(value, str):
return [section.strip() for section in value.split(',') if section.strip()]
elif isinstance(value, list):
return value
else:
return []
# Integer variables (pagination sizes, delays, etc.) - Check BEFORE boolean check
if any(keyword in var_name.lower() for keyword in ['_size', '_page', 'delay', 'min_', 'per_page', 'page_size']):
try:
return int(value)
except (ValueError, TypeError):
return 0
# Boolean variables - More specific patterns to avoid conflicts
if any(keyword in var_name.lower() for keyword in ['hide_', 'server_side_pagination', '_links', 'random', 'skip_', 'show_', 'use_', '_consolidation', '_charts', '_expanded']):
if isinstance(value, str):
return value.lower() in ('true', '1', 'yes', 'on')
return bool(value)
# String variables (default)
return str(value)
def _format_env_value(self, value: Any) -> str:
"""Format value for .env file storage"""
if isinstance(value, bool):
return 'true' if value else 'false'
elif isinstance(value, (int, float)):
return str(value)
elif isinstance(value, list):
return ','.join(str(item) for item in value)
elif value is None:
return ''
else:
return str(value)
def _update_env_file(self, var_name: str, value: Any) -> None:
"""Update the .env file with new value"""
if not self.env_file_path.exists():
# Ensure parent directory exists
self.env_file_path.parent.mkdir(parents=True, exist_ok=True)
self.env_file_path.touch()
# Read current .env content
lines = []
if self.env_file_path.exists():
with open(self.env_file_path, 'r', encoding='utf-8') as f:
lines = f.readlines()
# Format value for .env file
env_value = self._format_env_value(value)
# Find and update the line, or add new line
updated = False
# First pass: Look for existing active variable
for i, line in enumerate(lines):
if line.strip().startswith(f"{var_name}="):
lines[i] = f"{var_name}={env_value}\n"
updated = True
break
# Second pass: If not found, look for commented-out variable
if not updated:
for i, line in enumerate(lines):
stripped = line.strip()
# Check for commented-out variable: # BK_VAR= or #BK_VAR=
if stripped.startswith('#') and var_name in stripped:
# Extract the part after #, handling optional space
comment_content = stripped[1:].strip()
if comment_content.startswith(f"{var_name}=") or comment_content.startswith(f"{var_name} ="):
# Uncomment and set new value, preserving any leading whitespace from original line
leading_whitespace = line[:len(line) - len(line.lstrip())]
lines[i] = f"{leading_whitespace}{var_name}={env_value}\n"
updated = True
logger.info(f"Uncommented and updated {var_name} in .env file")
break
# Third pass: If still not found, append to end
if not updated:
lines.append(f"{var_name}={env_value}\n")
logger.info(f"Added new {var_name} to end of .env file")
# Write back to file
with open(self.env_file_path, 'w', encoding='utf-8') as f:
f.writelines(lines)
def validate_config(self) -> Dict[str, Any]:
"""Validate current configuration"""
issues = []
warnings = []
# Check if critical variables are set
if not os.environ.get('BK_REBRICKABLE_API_KEY'):
warnings.append("BK_REBRICKABLE_API_KEY not set - some features may not work")
# Check for conflicting settings
if (os.environ.get('BK_PARTS_SERVER_SIDE_PAGINATION', '').lower() == 'false' and
int(os.environ.get('BK_PARTS_PAGINATION_SIZE_DESKTOP', '10')) > 100):
warnings.append("Large pagination size with client-side pagination may cause performance issues")
# Check pagination sizes are reasonable
for var in ['BK_SETS_PAGINATION_SIZE_DESKTOP', 'BK_PARTS_PAGINATION_SIZE_DESKTOP', 'BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP']:
try:
size = int(os.environ.get(var, '10'))
if size < 1:
issues.append(f"{var} must be at least 1")
elif size > 1000:
warnings.append(f"{var} is very large ({size}) - may cause performance issues")
except ValueError:
issues.append(f"{var} must be a valid integer")
return {
'issues': issues,
'warnings': warnings,
'status': 'valid' if not issues else 'has_issues'
}
def get_variable_help(self, var_name: str) -> str:
    """Return a short human-readable description of a configuration variable.

    Unknown variables yield a generic fallback message.
    """
    descriptions = {
        'BK_BRICKLINK_LINKS': 'Show BrickLink links throughout the application',
        'BK_DEFAULT_TABLE_PER_PAGE': 'Default number of items per page in tables',
        'BK_INDEPENDENT_ACCORDIONS': 'Make accordion sections independent (can open multiple)',
        'BK_HIDE_ADD_SET': 'Hide the "Add Set" menu entry',
        'BK_HIDE_ADD_BULK_SET': 'Hide the "Add Bulk Set" menu entry',
        'BK_HIDE_ADMIN': 'Hide the "Admin" menu entry',
        'BK_ADMIN_DEFAULT_EXPANDED_SECTIONS': 'Admin sections to expand by default (comma-separated)',
        'BK_HIDE_ALL_INSTRUCTIONS': 'Hide the "Instructions" menu entry',
        'BK_HIDE_ALL_MINIFIGURES': 'Hide the "Minifigures" menu entry',
        'BK_HIDE_ALL_PARTS': 'Hide the "Parts" menu entry',
        'BK_HIDE_ALL_PROBLEMS_PARTS': 'Hide the "Problems" menu entry',
        'BK_HIDE_ALL_SETS': 'Hide the "Sets" menu entry',
        'BK_HIDE_ALL_STORAGES': 'Hide the "Storages" menu entry',
        'BK_HIDE_STATISTICS': 'Hide the "Statistics" menu entry',
        'BK_HIDE_SET_INSTRUCTIONS': 'Hide instructions section in set details',
        'BK_HIDE_TABLE_DAMAGED_PARTS': 'Hide the "Damaged" column in parts tables',
        'BK_HIDE_TABLE_MISSING_PARTS': 'Hide the "Missing" column in parts tables',
        'BK_HIDE_TABLE_CHECKED_PARTS': 'Hide the "Checked" column in parts tables',
        'BK_HIDE_WISHES': 'Hide the "Wishes" menu entry',
        'BK_SETS_CONSOLIDATION': 'Enable set consolidation/grouping functionality',
        'BK_SHOW_GRID_FILTERS': 'Show filter options on grids by default',
        'BK_SHOW_GRID_SORT': 'Show sort options on grids by default',
        'BK_SKIP_SPARE_PARTS': 'Skip importing spare parts when downloading sets from Rebrickable',
        'BK_HIDE_SPARE_PARTS': 'Hide spare parts from parts lists (spare parts must still be in database)',
        'BK_USE_REMOTE_IMAGES': 'Use remote images from Rebrickable CDN instead of local',
        'BK_STATISTICS_SHOW_CHARTS': 'Show collection growth charts on statistics page',
        'BK_STATISTICS_DEFAULT_EXPANDED': 'Expand all statistics sections by default',
        'BK_DARK_MODE': 'Enable dark mode theme'
    }
    if var_name in descriptions:
        return descriptions[var_name]
    # Fall back to a generic message for anything we do not document
    return 'No help available for this variable'

View File

@@ -60,7 +60,7 @@ class BrickConfiguration(object):
if self.cast == bool and isinstance(value, str): if self.cast == bool and isinstance(value, str):
value = value.lower() in ('true', 'yes', '1') value = value.lower() in ('true', 'yes', '1')
# Static path fixup # Static path fixup (legacy - only for paths with s: True flag)
if self.static_path and isinstance(value, str): if self.static_path and isinstance(value, str):
value = os.path.normpath(value) value = os.path.normpath(value)
@@ -70,6 +70,10 @@ class BrickConfiguration(object):
# Remove static prefix # Remove static prefix
value = value.removeprefix('static/') value = value.removeprefix('static/')
# Normalize regular paths (not marked as static)
elif not self.static_path and isinstance(value, str) and ('FOLDER' in self.name or 'PATH' in self.name):
value = os.path.normpath(value)
# Type casting # Type casting
if self.cast is not None: if self.cast is not None:
self.value = self.cast(value) self.value = self.cast(value)

View File

@@ -13,7 +13,6 @@ import requests
from werkzeug.datastructures import FileStorage from werkzeug.datastructures import FileStorage
from werkzeug.utils import secure_filename from werkzeug.utils import secure_filename
import re import re
import cloudscraper
from .exceptions import ErrorException, DownloadException from .exceptions import ErrorException, DownloadException
if TYPE_CHECKING: if TYPE_CHECKING:
@@ -101,16 +100,39 @@ class BrickInstructions(object):
# Skip if we already have it # Skip if we already have it
if os.path.isfile(target): if os.path.isfile(target):
pdf_url = self.url()
return self.socket.complete( return self.socket.complete(
message=f"File {self.filename} already exists, skipped" message=f'File {self.filename} already exists, skipped - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
) )
# Fetch PDF via cloudscraper (to bypass Cloudflare) # Use plain requests instead of cloudscraper
scraper = cloudscraper.create_scraper() session = requests.Session()
scraper.headers.update({ session.headers.update({
"User-Agent": current_app.config['REBRICKABLE_USER_AGENT'] 'User-Agent': current_app.config['REBRICKABLE_USER_AGENT'],
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.5',
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'same-origin',
'Cache-Control': 'max-age=0'
}) })
resp = scraper.get(path, stream=True)
# Visit the set's instructions listing page first to establish session cookies
set_number = None
if self.rebrickable:
set_number = self.rebrickable.fields.set
elif self.set:
set_number = self.set
if set_number:
instructions_page = f"https://rebrickable.com/instructions/{set_number}/"
session.get(instructions_page)
session.headers.update({"Referer": instructions_page})
resp = session.get(path, stream=True, allow_redirects=True)
if not resp.ok: if not resp.ok:
raise DownloadException(f"Failed to download: HTTP {resp.status_code}") raise DownloadException(f"Failed to download: HTTP {resp.status_code}")
@@ -141,8 +163,9 @@ class BrickInstructions(object):
# Done! # Done!
logger.info(f"Downloaded {self.filename}") logger.info(f"Downloaded {self.filename}")
pdf_url = self.url()
self.socket.complete( self.socket.complete(
message=f"File {self.filename} downloaded ({self.human_size()})" message=f'File {self.filename} downloaded ({self.human_size()}) - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
) )
except Exception as e: except Exception as e:
@@ -170,11 +193,16 @@ class BrickInstructions(object):
if filename is None: if filename is None:
filename = self.filename filename = self.filename
return os.path.join( folder = current_app.config['INSTRUCTIONS_FOLDER']
current_app.static_folder, # type: ignore
current_app.config['INSTRUCTIONS_FOLDER'], # If folder is absolute, use it directly
filename # Otherwise, make it relative to app root (not static folder)
) if os.path.isabs(folder):
base_path = folder
else:
base_path = os.path.join(current_app.root_path, folder)
return os.path.join(base_path, filename)
# Rename an instructions file # Rename an instructions file
def rename(self, filename: str, /) -> None: def rename(self, filename: str, /) -> None:
@@ -215,10 +243,16 @@ class BrickInstructions(object):
folder: str = current_app.config['INSTRUCTIONS_FOLDER'] folder: str = current_app.config['INSTRUCTIONS_FOLDER']
# Compute the path # Determine which route to use based on folder path
path = os.path.join(folder, self.filename) # If folder contains 'data' (new structure), use data route
# Otherwise use static route (legacy)
return url_for('static', filename=path) if 'data' in folder:
return url_for('data.serve_data_file', folder='instructions', filename=self.filename)
else:
# Legacy: folder is relative to static/
folder_clean = folder.removeprefix('static/')
path = os.path.join(folder_clean, self.filename)
return url_for('static', filename=path)
# Return the icon depending on the extension # Return the icon depending on the extension
def icon(self, /) -> str: def icon(self, /) -> str:
@@ -235,34 +269,49 @@ class BrickInstructions(object):
@staticmethod @staticmethod
def find_instructions(set: str, /) -> list[Tuple[str, str]]: def find_instructions(set: str, /) -> list[Tuple[str, str]]:
""" """
Scrape Rebrickables HTML and return a list of Scrape Rebrickable's HTML and return a list of
(filename_slug, download_url). Duplicate slugs get _1, _2, … (filename_slug, download_url). Duplicate slugs get _1, _2, …
""" """
page_url = f"https://rebrickable.com/instructions/{set}/" page_url = f"https://rebrickable.com/instructions/{set}/"
logger.debug(f"[find_instructions] fetching HTML from {page_url!r}") logger.debug(f"[find_instructions] fetching HTML from {page_url!r}")
# Solve Cloudflares challenge # Use plain requests instead of cloudscraper
scraper = cloudscraper.create_scraper() session = requests.Session()
scraper.headers.update({'User-Agent': current_app.config['REBRICKABLE_USER_AGENT']}) session.headers.update({
resp = scraper.get(page_url) 'User-Agent': current_app.config['REBRICKABLE_USER_AGENT'],
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.5',
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'none',
'Cache-Control': 'max-age=0'
})
resp = session.get(page_url)
if not resp.ok: if not resp.ok:
raise ErrorException(f'Failed to load instructions page for {set}. HTTP {resp.status_code}') raise ErrorException(f'Failed to load instructions page for {set}. HTTP {resp.status_code}')
soup = BeautifulSoup(resp.content, 'html.parser') soup = BeautifulSoup(resp.content, 'html.parser')
# Match download links with or without query parameters (e.g., ?cfe=timestamp&cfk=key)
link_re = re.compile(r'^/instructions/\d+/.+/download/') link_re = re.compile(r'^/instructions/\d+/.+/download/')
raw: list[tuple[str, str]] = [] raw: list[tuple[str, str]] = []
for a in soup.find_all('a', href=link_re): for a in soup.find_all('a', href=link_re):
img = a.find('img', alt=True) img = a.find('img', alt=True) # type: ignore
if not img or set not in img['alt']: if not img or set not in img['alt']: # type: ignore
continue continue
# Turn the alt text into a slug # Turn the alt text into a slug
alt_text = img['alt'].removeprefix('LEGO Building Instructions for ') alt_text = img['alt'].removeprefix('LEGO Building Instructions for ') # type: ignore
slug = re.sub(r'[^A-Za-z0-9]+', '-', alt_text).strip('-') slug = re.sub(r'[^A-Za-z0-9]+', '-', alt_text).strip('-')
# Build the absolute download URL # Build the absolute download URL - this preserves query parameters
download_url = urljoin('https://rebrickable.com', a['href']) # BeautifulSoup's a['href'] includes the full href with ?cfe=...&cfk=... params
download_url = urljoin('https://rebrickable.com', a['href']) # type: ignore
logger.debug(f"[find_instructions] Found download link: {download_url}")
raw.append((slug, download_url)) raw.append((slug, download_url))
if not raw: if not raw:

View File

@@ -36,11 +36,14 @@ class BrickInstructionsList(object):
# Try to list the files in the instruction folder # Try to list the files in the instruction folder
try: try:
# Make a folder relative to static folder_config: str = current_app.config['INSTRUCTIONS_FOLDER']
folder: str = os.path.join(
current_app.static_folder, # type: ignore # If folder is absolute, use it directly
current_app.config['INSTRUCTIONS_FOLDER'], # Otherwise, make it relative to app root (not static folder)
) if os.path.isabs(folder_config):
folder = folder_config
else:
folder = os.path.join(current_app.root_path, folder_config)
for file in os.scandir(folder): for file in os.scandir(folder):
instruction = BrickInstructions(file) instruction = BrickInstructions(file)

View File

@@ -191,19 +191,15 @@ class BrickMetadata(BrickRecord):
parameters['set_id'] = brickset.fields.id parameters['set_id'] = brickset.fields.id
parameters['state'] = state parameters['state'] = state
rows, _ = BrickSQL().execute_and_commit( rows, _ = BrickSQL().execute(
self.update_set_state_query, self.update_set_state_query,
parameters=parameters, parameters=parameters,
defer=True,
name=self.as_column(), name=self.as_column(),
) )
if rows != 1: # Note: rows will be -1 when deferred, so we can't validate here
raise DatabaseException('Could not update the {kind} "{name}" state for set {set} ({id})'.format( # noqa: E501 # Validation will happen at final commit in set.py
kind=self.kind,
name=self.fields.name,
set=brickset.fields.set,
id=brickset.fields.id,
))
# Info # Info
logger.info('{kind} "{name}" state changed to "{state}" for set {set} ({id})'.format( # noqa: E501 logger.info('{kind} "{name}" state changed to "{state}" for set {set} ({id})'.format( # noqa: E501

View File

@@ -111,6 +111,16 @@ class BrickMetadataList(BrickRecordList[T]):
in new.filter(**kwargs) in new.filter(**kwargs)
]) ])
# Return the items as a dictionary mapping column names to UUIDs
@classmethod
def as_column_mapping(cls, /, **kwargs) -> dict:
    instance = cls.new()
    mapping: dict = {}
    for record in instance.filter(**kwargs):
        mapping[record.as_column()] = record.fields.id
    return mapping
# Grab a specific status # Grab a specific status
@classmethod @classmethod
def get(cls, id: str | None, /, *, allow_none: bool = False) -> T: def get(cls, id: str | None, /, *, allow_none: bool = False) -> T:

View File

@@ -43,6 +43,19 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
return self return self
# Load all minifigures with problems filter
def all_filtered(self, /, problems_filter: str = 'all', theme_id: str = 'all', year: str = 'all') -> Self:
    # Only forward filters that actually narrow the result ('all' means no filter)
    context = {
        name: value
        for name, value in (
            ('problems_filter', problems_filter),
            ('theme_id', theme_id),
            ('year', year),
        )
        if value and value != 'all'
    }
    self.list(override_query=self.all_query, **context)
    return self
# Load all minifigures by owner # Load all minifigures by owner
def all_by_owner(self, owner_id: str | None = None, /) -> Self: def all_by_owner(self, owner_id: str | None = None, /) -> Self:
# Save the owner_id parameter # Save the owner_id parameter
@@ -53,6 +66,78 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
return self return self
# Load all minifigures by owner with problems filter
def all_by_owner_filtered(self, /, owner_id: str | None = None, problems_filter: str = 'all', theme_id: str = 'all', year: str = 'all') -> Self:
    # Save the owner_id parameter
    self.fields.owner_id = owner_id
    # Only forward filters that actually narrow the result ('all' means no filter)
    context = {
        name: value
        for name, value in (
            ('problems_filter', problems_filter),
            ('theme_id', theme_id),
            ('year', year),
        )
        if value and value != 'all'
    }
    # Load the minifigures from the database
    self.list(override_query=self.all_by_owner_query, **context)
    return self
# Load minifigures with pagination support
def all_filtered_paginated(
    self,
    owner_id: str | None = None,
    problems_filter: str = 'all',
    theme_id: str = 'all',
    year: str = 'all',
    search_query: str | None = None,
    page: int = 1,
    per_page: int = 50,
    sort_field: str | None = None,
    sort_order: str = 'asc'
) -> tuple[Self, int]:
    # Pick the query: owner-scoped when a concrete owner is requested
    if owner_id and owner_id != 'all':
        filter_context = {'owner_id': owner_id}
        list_query = self.all_by_owner_query
    else:
        filter_context = {}
        list_query = self.all_query
    if search_query:
        filter_context['search_query'] = search_query
    # Only forward filters that actually narrow the result
    for name, value in (
        ('problems_filter', problems_filter),
        ('theme_id', theme_id),
        ('year', year),
    ):
        if value and value != 'all':
            filter_context[name] = value
    # Map sortable template fields to their SQL expressions
    field_mapping = {
        'name': '"rebrickable_minifigures"."name"',
        'parts': '"rebrickable_minifigures"."number_of_parts"',
        'quantity': '"total_quantity"',
        'missing': '"total_missing"',
        'damaged': '"total_damaged"',
        'sets': '"total_sets"'
    }
    # Delegate to the shared pagination helper
    return self.paginate(
        page=page,
        per_page=per_page,
        sort_field=sort_field,
        sort_order=sort_order,
        list_query=list_query,
        field_mapping=field_mapping,
        **filter_context
    )
# Minifigures with a part damaged part # Minifigures with a part damaged part
def damaged_part(self, part: str, color: int, /) -> Self: def damaged_part(self, part: str, color: int, /) -> Self:
# Save the parameters to the fields # Save the parameters to the fields
@@ -95,16 +180,19 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
brickset = None brickset = None
# Prepare template context for owner filtering # Prepare template context for owner filtering
context = {} context_vars = {}
if hasattr(self.fields, 'owner_id') and self.fields.owner_id is not None: if hasattr(self.fields, 'owner_id') and self.fields.owner_id is not None:
context['owner_id'] = self.fields.owner_id context_vars['owner_id'] = self.fields.owner_id
# Merge with any additional context passed in
context_vars.update(context)
# Load the sets from the database # Load the sets from the database
for record in super().select( for record in super().select(
override_query=override_query, override_query=override_query,
order=order, order=order,
limit=limit, limit=limit,
**context **context_vars
): ):
minifigure = BrickMinifigure(brickset=brickset, record=record) minifigure = BrickMinifigure(brickset=brickset, record=record)

View File

@@ -15,6 +15,7 @@ NAVBAR: Final[list[dict[str, Any]]] = [
{'e': 'minifigure.list', 't': 'Minifigures', 'i': 'group-line', 'f': 'HIDE_ALL_MINIFIGURES'}, # noqa: E501 {'e': 'minifigure.list', 't': 'Minifigures', 'i': 'group-line', 'f': 'HIDE_ALL_MINIFIGURES'}, # noqa: E501
{'e': 'instructions.list', 't': 'Instructions', 'i': 'file-line', 'f': 'HIDE_ALL_INSTRUCTIONS'}, # noqa: E501 {'e': 'instructions.list', 't': 'Instructions', 'i': 'file-line', 'f': 'HIDE_ALL_INSTRUCTIONS'}, # noqa: E501
{'e': 'storage.list', 't': 'Storages', 'i': 'archive-2-line', 'f': 'HIDE_ALL_STORAGES'}, # noqa: E501 {'e': 'storage.list', 't': 'Storages', 'i': 'archive-2-line', 'f': 'HIDE_ALL_STORAGES'}, # noqa: E501
{'e': 'statistics.overview', 't': 'Statistics', 'i': 'bar-chart-line', 'f': 'HIDE_STATISTICS'}, # noqa: E501
{'e': 'wish.list', 't': 'Wishlist', 'i': 'gift-line', 'f': 'HIDE_WISHES'}, {'e': 'wish.list', 't': 'Wishlist', 'i': 'gift-line', 'f': 'HIDE_WISHES'},
{'e': 'admin.admin', 't': 'Admin', 'i': 'settings-4-line', 'f': 'HIDE_ADMIN'}, # noqa: E501 {'e': 'admin.admin', 't': 'Admin', 'i': 'settings-4-line', 'f': 'HIDE_ADMIN'}, # noqa: E501
] ]

View File

@@ -0,0 +1,52 @@
from flask import current_app, request
from typing import Any, Dict, Tuple
def get_pagination_config(entity_type: str) -> Tuple[int, bool]:
    """Get pagination configuration for an entity type (sets, parts, minifigures).

    Returns (per_page, is_mobile); per_page is 0 when server-side
    pagination is disabled for this entity type.
    """
    entity_upper = entity_type.upper()
    # Server-side pagination can be toggled independently per entity type
    if not current_app.config.get(f'{entity_upper}_SERVER_SIDE_PAGINATION', False):
        return 0, False
    # Crude device detection from the User-Agent header
    user_agent = request.headers.get('User-Agent', '').lower()
    is_mobile = any(marker in user_agent for marker in ('mobile', 'android', 'iphone', 'ipad'))
    # Mobile and desktop page sizes are configured separately
    if is_mobile:
        size_key = f'{entity_upper}_PAGINATION_SIZE_MOBILE'
    else:
        size_key = f'{entity_upper}_PAGINATION_SIZE_DESKTOP'
    return current_app.config[size_key], is_mobile
def build_pagination_context(page: int, per_page: int, total_count: int, is_mobile: bool) -> Dict[str, Any]:
    """Assemble the pagination state dict consumed by templates.

    total_pages is the ceiling of total_count / per_page, but never below 1
    so templates always have at least one page to render.
    """
    if total_count > 0:
        total_pages = -(-total_count // per_page)  # ceiling division
    else:
        total_pages = 1
    return {
        'page': page,
        'per_page': per_page,
        'total_count': total_count,
        'total_pages': total_pages,
        'has_prev': page > 1,
        'has_next': page < total_pages,
        'is_mobile': is_mobile,
    }
def get_request_params() -> Tuple[str, str, str, int]:
    """Extract common request parameters for pagination.

    Returns (search_query, sort_field, sort_order, page). The page number
    is parsed defensively: a non-numeric or sub-1 ?page= value falls back
    to 1 instead of raising ValueError (which would surface as an HTTP 500).
    """
    search_query = request.args.get('search', '').strip()
    sort_field = request.args.get('sort', '')
    sort_order = request.args.get('order', 'asc')
    try:
        page = int(request.args.get('page', 1))
    except (TypeError, ValueError):
        page = 1
    if page < 1:
        page = 1
    return search_query, sort_field, sort_order, page

View File

@@ -5,33 +5,29 @@ from .exceptions import ErrorException
def parse_set(set: str, /) -> str: def parse_set(set: str, /) -> str:
number, _, version = set.partition('-') number, _, version = set.partition('-')
# Making sure both are integers # Set number can be alphanumeric (e.g., "McDR6US", "10312", "COMCON035")
# Just validate it's not empty
if not number or number.strip() == '':
raise ErrorException('Set number cannot be empty')
# Clean up the number (trim whitespace)
number = number.strip()
# Version defaults to 1 if not provided
if version == '': if version == '':
version = 1 version = '1'
# Version must be a positive integer
try: try:
number = int(number) version_int = int(version)
except Exception:
raise ErrorException('Number "{number}" is not a number'.format(
number=number,
))
try:
version = int(version)
except Exception: except Exception:
raise ErrorException('Version "{version}" is not a number'.format( raise ErrorException('Version "{version}" is not a number'.format(
version=version, version=version,
)) ))
# Make sure both are positive if version_int < 0:
if number < 0: raise ErrorException('Version "{version}" should be positive'.format(
raise ErrorException('Number "{number}" should be positive'.format(
number=number,
))
if version < 0:
raise ErrorException('Version "{version}" should be positive'.format( # noqa: E501
version=version, version=version,
)) ))
return '{number}-{version}'.format(number=number, version=version) return '{number}-{version}'.format(number=number, version=version_int)

View File

@@ -159,6 +159,43 @@ class BrickPart(RebrickablePart):
return self return self
# Update checked state for part walkthrough
def update_checked(self, json: Any | None, /) -> bool:
    # Accept both the direct 'checked' key and the changer.js 'value' key
    checked = False
    if json:
        checked = bool(json.get('checked', json.get('value', False)))
    # Persist the new state on the record and in the database
    self.fields.checked = checked
    BrickSQL().execute_and_commit(
        'part/update/checked',
        parameters=self.sql_parameters()
    )
    return checked
# Compute the url for updating checked state
def url_for_checked(self, /) -> str:
    # Minifigure parts carry the figure reference; set parts do not
    figure = self.minifigure.fields.figure if self.minifigure is not None else None
    return url_for(
        'set.checked_part',
        id=self.fields.id,
        figure=figure,
        part=self.fields.part,
        color=self.fields.color,
        spare=self.fields.spare,
    )
# Update a problematic part # Update a problematic part
def update_problem(self, problem: str, json: Any | None, /) -> int: def update_problem(self, problem: str, json: Any | None, /) -> int:
amount: str | int = json.get('value', '') # type: ignore amount: str | int = json.get('value', '') # type: ignore

View File

@@ -57,8 +57,8 @@ class BrickPartList(BrickRecordList[BrickPart]):
return self return self
# Load all parts with filters (owner and/or color) # Load all parts with filters (owner, color, theme, year)
def all_filtered(self, owner_id: str | None = None, color_id: str | None = None, /) -> Self: def all_filtered(self, owner_id: str | None = None, color_id: str | None = None, theme_id: str | None = None, year: str | None = None, /) -> Self:
# Save the filter parameters # Save the filter parameters
if owner_id is not None: if owner_id is not None:
self.fields.owner_id = owner_id self.fields.owner_id = owner_id
@@ -71,11 +71,76 @@ class BrickPartList(BrickRecordList[BrickPart]):
else: else:
query = self.all_query query = self.all_query
# Prepare context for query
context = {}
# Hide spare parts from display if configured
if current_app.config.get('HIDE_SPARE_PARTS', False):
context['skip_spare_parts'] = True
if theme_id and theme_id != 'all':
context['theme_id'] = theme_id
if year and year != 'all':
context['year'] = year
# Load the parts from the database # Load the parts from the database
self.list(override_query=query) self.list(override_query=query, **context)
return self return self
# Load parts with pagination support
def all_filtered_paginated(
    self,
    owner_id: str | None = None,
    color_id: str | None = None,
    theme_id: str | None = None,
    year: str | None = None,
    search_query: str | None = None,
    page: int = 1,
    per_page: int = 50,
    sort_field: str | None = None,
    sort_order: str = 'asc'
) -> tuple[Self, int]:
    # Pick the query: owner-scoped when a concrete owner is requested
    if owner_id and owner_id != 'all':
        filter_context = {'owner_id': owner_id}
        list_query = self.all_by_owner_query
    else:
        filter_context = {}
        list_query = self.all_query
    # Only forward filters that actually narrow the result
    for name, value in (
        ('color_id', color_id),
        ('theme_id', theme_id),
        ('year', year),
    ):
        if value and value != 'all':
            filter_context[name] = value
    if search_query:
        filter_context['search_query'] = search_query
    # Hide spare parts from display if configured
    if current_app.config.get('HIDE_SPARE_PARTS', False):
        filter_context['skip_spare_parts'] = True
    # Map sortable template fields to their SQL expressions
    field_mapping = {
        'name': '"rebrickable_parts"."name"',
        'color': '"rebrickable_parts"."color_name"',
        'quantity': '"total_quantity"',
        'missing': '"total_missing"',
        'damaged': '"total_damaged"',
        'sets': '"total_sets"',
        'minifigures': '"total_minifigures"'
    }
    # Delegate to the shared pagination helper
    return self.paginate(
        page=page,
        per_page=per_page,
        sort_field=sort_field,
        sort_order=sort_order,
        list_query=list_query,
        field_mapping=field_mapping,
        **filter_context
    )
# Base part list # Base part list
def list( def list(
self, self,
@@ -84,6 +149,7 @@ class BrickPartList(BrickRecordList[BrickPart]):
override_query: str | None = None, override_query: str | None = None,
order: str | None = None, order: str | None = None,
limit: int | None = None, limit: int | None = None,
offset: int | None = None,
**context: Any, **context: Any,
) -> None: ) -> None:
if order is None: if order is None:
@@ -105,12 +171,18 @@ class BrickPartList(BrickRecordList[BrickPart]):
context_vars['owner_id'] = self.fields.owner_id context_vars['owner_id'] = self.fields.owner_id
if hasattr(self.fields, 'color_id') and self.fields.color_id is not None: if hasattr(self.fields, 'color_id') and self.fields.color_id is not None:
context_vars['color_id'] = self.fields.color_id context_vars['color_id'] = self.fields.color_id
if hasattr(self.fields, 'search_query') and self.fields.search_query:
context_vars['search_query'] = self.fields.search_query
# Merge with any additional context passed in
context_vars.update(context)
# Load the sets from the database # Load the sets from the database
for record in super().select( for record in super().select(
override_query=override_query, override_query=override_query,
order=order, order=order,
limit=limit, limit=limit,
offset=offset,
**context_vars **context_vars
): ):
part = BrickPart( part = BrickPart(
@@ -119,9 +191,6 @@ class BrickPartList(BrickRecordList[BrickPart]):
record=record, record=record,
) )
if current_app.config['SKIP_SPARE_PARTS'] and part.fields.spare:
continue
self.records.append(part) self.records.append(part)
# List specific parts from a brickset or minifigure # List specific parts from a brickset or minifigure
@@ -136,8 +205,13 @@ class BrickPartList(BrickRecordList[BrickPart]):
self.brickset = brickset self.brickset = brickset
self.minifigure = minifigure self.minifigure = minifigure
# Prepare context for hiding spare parts if configured
context = {}
if current_app.config.get('HIDE_SPARE_PARTS', False):
context['skip_spare_parts'] = True
# Load the parts from the database # Load the parts from the database
self.list() self.list(**context)
return self return self
@@ -150,8 +224,13 @@ class BrickPartList(BrickRecordList[BrickPart]):
# Save the minifigure # Save the minifigure
self.minifigure = minifigure self.minifigure = minifigure
# Prepare context for hiding spare parts if configured
context = {}
if current_app.config.get('HIDE_SPARE_PARTS', False):
context['skip_spare_parts'] = True
# Load the parts from the database # Load the parts from the database
self.list(override_query=self.minifigure_query) self.list(override_query=self.minifigure_query, **context)
return self return self
@@ -181,6 +260,92 @@ class BrickPartList(BrickRecordList[BrickPart]):
return self return self
def problem_filtered(self, owner_id: str | None = None, color_id: str | None = None, theme_id: str | None = None, year: str | None = None, storage_id: str | None = None, tag_id: str | None = None, /) -> Self:
    # Save the filter parameters for client-side filtering
    if owner_id is not None:
        self.fields.owner_id = owner_id
    if color_id is not None:
        self.fields.color_id = color_id
    # Only forward filters that actually narrow the result ('all' means no filter)
    context = {
        name: value
        for name, value in (
            ('owner_id', owner_id),
            ('color_id', color_id),
            ('theme_id', theme_id),
            ('year', year),
            ('storage_id', storage_id),
            ('tag_id', tag_id),
        )
        if value and value != 'all'
    }
    # Hide spare parts from display if configured
    if current_app.config.get('HIDE_SPARE_PARTS', False):
        context['skip_spare_parts'] = True
    # Load the problematic parts from the database
    self.list(override_query=self.problem_query, **context)
    return self
def problem_paginated(
    self,
    owner_id: str | None = None,
    color_id: str | None = None,
    theme_id: str | None = None,
    year: str | None = None,
    storage_id: str | None = None,
    tag_id: str | None = None,
    search_query: str | None = None,
    page: int = 1,
    per_page: int = 50,
    sort_field: str | None = None,
    sort_order: str = 'asc'
) -> tuple[Self, int]:
    # Only forward filters that actually narrow the result ('all' means no filter)
    filter_context = {
        name: value
        for name, value in (
            ('owner_id', owner_id),
            ('color_id', color_id),
            ('theme_id', theme_id),
            ('year', year),
            ('storage_id', storage_id),
            ('tag_id', tag_id),
        )
        if value and value != 'all'
    }
    if search_query:
        filter_context['search_query'] = search_query
    # Hide spare parts from display if configured
    if current_app.config.get('HIDE_SPARE_PARTS', False):
        filter_context['skip_spare_parts'] = True
    # Map sortable template fields to their SQL expressions
    field_mapping = {
        'name': '"rebrickable_parts"."name"',
        'color': '"rebrickable_parts"."color_name"',
        'quantity': '"total_quantity"',
        'missing': '"total_missing"',
        'damaged': '"total_damaged"',
        'sets': '"total_sets"',
        'minifigures': '"total_minifigures"'
    }
    # Use the base pagination method with problem query
    return self.paginate(
        page=page,
        per_page=per_page,
        sort_field=sort_field,
        sort_order=sort_order,
        list_query=self.problem_query,
        field_mapping=field_mapping,
        **filter_context
    )
# Return a dict with common SQL parameters for a parts list # Return a dict with common SQL parameters for a parts list
def sql_parameters(self, /) -> dict[str, Any]: def sql_parameters(self, /) -> dict[str, Any]:
parameters: dict[str, Any] = super().sql_parameters() parameters: dict[str, Any] = super().sql_parameters()
@@ -256,7 +421,13 @@ class BrickPartList(BrickRecordList[BrickPart]):
# Process each part # Process each part
number_of_parts: int = 0 number_of_parts: int = 0
skip_spares = current_app.config.get('SKIP_SPARE_PARTS', False)
for part in inventory: for part in inventory:
# Skip spare parts if configured
if skip_spares and part.fields.spare:
continue
# Count the number of parts for minifigures # Count the number of parts for minifigures
if minifigure is not None: if minifigure is not None:
number_of_parts += part.fields.quantity number_of_parts += part.fields.quantity

View File

@@ -0,0 +1,436 @@
import hashlib
import logging
import os
from pathlib import Path
import time
from typing import Any, NamedTuple, TYPE_CHECKING
from urllib.parse import urljoin
from bs4 import BeautifulSoup
from flask import current_app, url_for
import requests
from .exceptions import ErrorException
if TYPE_CHECKING:
from .socket import BrickSocket
logger = logging.getLogger(__name__)
def get_peeron_user_agent():
    """Return the User-Agent header for Peeron requests, from config when set."""
    fallback = ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
                '(KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36')
    return current_app.config.get('REBRICKABLE_USER_AGENT', fallback)
def get_peeron_download_delay():
    """Return the delay (milliseconds) between Peeron page downloads.

    Reads PEERON_DOWNLOAD_DELAY from the Flask app config; defaults to
    1000 ms when unset.
    """
    config = current_app.config
    return config.get('PEERON_DOWNLOAD_DELAY', 1000)
def get_min_image_size():
    """Return the minimum byte size for a Peeron download to count as a
    valid instruction image.

    Reads PEERON_MIN_IMAGE_SIZE from the Flask app config; defaults to 100.
    """
    config = current_app.config
    return config.get('PEERON_MIN_IMAGE_SIZE', 100)
def get_peeron_instruction_url(set_number: str, version_number: str):
    """Build the Peeron instruction page URL for a set/version pair.

    The pattern comes from PEERON_INSTRUCTION_PATTERN and is expanded via
    str.format() with {set_number} and {version_number} placeholders.
    """
    default_pattern = 'http://peeron.com/scans/{set_number}-{version_number}'
    pattern = current_app.config.get('PEERON_INSTRUCTION_PATTERN', default_pattern)
    return pattern.format(
        set_number=set_number,
        version_number=version_number,
    )
def get_peeron_thumbnail_url(set_number: str, version_number: str):
    """Build the Peeron thumbnail base URL (trailing slash included).

    The pattern comes from PEERON_THUMBNAIL_PATTERN and is expanded via
    str.format() with {set_number} and {version_number} placeholders.
    """
    default_pattern = 'http://belay.peeron.com/thumbs/{set_number}-{version_number}/'
    pattern = current_app.config.get('PEERON_THUMBNAIL_PATTERN', default_pattern)
    return pattern.format(
        set_number=set_number,
        version_number=version_number,
    )
def get_peeron_scan_url(set_number: str, version_number: str):
    """Build the Peeron full-size scan base URL (trailing slash included).

    The pattern comes from PEERON_SCAN_PATTERN and is expanded via
    str.format() with {set_number} and {version_number} placeholders.
    """
    default_pattern = 'http://belay.peeron.com/scans/{set_number}-{version_number}/'
    pattern = current_app.config.get('PEERON_SCAN_PATTERN', default_pattern)
    return pattern.format(
        set_number=set_number,
        version_number=version_number,
    )
def create_peeron_scraper():
    """Create a requests.Session preconfigured for talking to Peeron.

    The session carries the configured User-Agent header and keeps
    cookies across requests (Peeron expects a browser-like client).
    """
    scraper = requests.Session()
    scraper.headers["User-Agent"] = get_peeron_user_agent()
    return scraper
def get_peeron_cache_dir():
    """Return (creating it if needed) the base Peeron cache directory.

    Lives under the Flask static folder at images/peeron_cache so cached
    thumbnails can be served through the static route.
    """
    cache_dir = Path(current_app.static_folder) / 'images' / 'peeron_cache'
    cache_dir.mkdir(parents=True, exist_ok=True)
    return cache_dir
def get_set_cache_dir(set_number: str, version_number: str) -> tuple[Path, Path]:
    """Return the (full_images_dir, thumbnails_dir) cache pair for one set.

    Both directories are created on demand and are keyed by the
    "<set_number>-<version_number>" string.
    """
    cache_key = f"{set_number}-{version_number}"
    base = get_peeron_cache_dir()
    full_dir = base / 'full' / cache_key
    thumb_dir = base / 'thumbs' / cache_key
    for directory in (full_dir, thumb_dir):
        directory.mkdir(parents=True, exist_ok=True)
    return full_dir, thumb_dir
def cache_full_image_and_generate_thumbnail(image_url: str, page_number: str, set_number: str, version_number: str, session=None) -> tuple[str | None, str | None]:
    """
    Download and cache full-size image, then generate a thumbnail preview.
    Uses the full-size scan URLs from Peeron.
    Returns (cached_image_path, thumbnail_url) or (None, None) if caching fails.

    :param image_url: Peeron full-size scan URL to download
    :param page_number: page identifier, used as the cache filename stem
    :param set_number: set number (without version suffix)
    :param version_number: set version number
    :param session: optional requests.Session to reuse cookies/headers;
                    a fresh Peeron scraper session is created when None
    """
    try:
        full_cache_dir, thumb_cache_dir = get_set_cache_dir(set_number, version_number)
        # Full image and thumbnail share the same filename, in different dirs
        full_filename = f"{page_number}.jpg"
        thumb_filename = f"{page_number}.jpg"
        full_cache_path = full_cache_dir / full_filename
        thumb_cache_path = thumb_cache_dir / thumb_filename
        # Return existing cached files if they exist (cache hit: no download)
        if full_cache_path.exists() and thumb_cache_path.exists():
            set_cache_key = f"{set_number}-{version_number}"
            thumbnail_url = url_for('static', filename=f'images/peeron_cache/thumbs/{set_cache_key}/{thumb_filename}')
            return str(full_cache_path), thumbnail_url
        # Download the full-size image using provided session or create new one
        if session is None:
            session = create_peeron_scraper()
        response = session.get(image_url, timeout=30)
        if response.status_code == 200 and len(response.content) > 0:
            # Validate it's actually an image by checking minimum size
            # (tiny responses are presumably error pages — see PEERON_MIN_IMAGE_SIZE)
            min_size = get_min_image_size()
            if len(response.content) < min_size:
                logger.warning(f"Image too small, skipping cache: {image_url}")
                return None, None
            # Write full-size image to cache
            with open(full_cache_path, 'wb') as f:
                f.write(response.content)
            logger.debug(f"Cached full image: {image_url} -> {full_cache_path}")
            # Generate thumbnail from the cached full image
            try:
                # Local import: PIL only needed on the thumbnail path
                from PIL import Image
                with Image.open(full_cache_path) as img:
                    # Create thumbnail (max 150px on longest side to match template)
                    img.thumbnail((150, 150), Image.Resampling.LANCZOS)
                    img.save(thumb_cache_path, 'JPEG', quality=85)
                logger.debug(f"Generated thumbnail: {full_cache_path} -> {thumb_cache_path}")
                set_cache_key = f"{set_number}-{version_number}"
                thumbnail_url = url_for('static', filename=f'images/peeron_cache/thumbs/{set_cache_key}/{thumb_filename}')
                return str(full_cache_path), thumbnail_url
            except Exception as thumb_error:
                logger.error(f"Failed to generate thumbnail for {page_number}: {thumb_error}")
                # Clean up the full image if thumbnail generation failed
                # so the cache never holds a full image without its preview
                if full_cache_path.exists():
                    full_cache_path.unlink()
                return None, None
        else:
            logger.warning(f"Failed to download full image: {image_url}")
            return None, None
    except Exception as e:
        logger.error(f"Error caching full image {image_url}: {e}")
        return None, None
def _clear_jpg_directory(directory: Path, description: str) -> int:
    """Delete every *.jpg in `directory`, then try to remove the directory.

    :param directory: cache directory to purge (may not exist)
    :param description: human-readable kind of file, used in log messages
    :returns: number of files deleted
    """
    deleted = 0
    if directory.exists():
        for cache_file in directory.glob('*.jpg'):
            try:
                cache_file.unlink()
                deleted += 1
                logger.debug(f"Deleted cached {description}: {cache_file}")
            except OSError as e:
                logger.warning(f"Failed to delete cache file {cache_file}: {e}")
        # Remove directory if empty
        try:
            directory.rmdir()
        except OSError:
            pass  # Directory not empty or other error
    return deleted


def clear_set_cache(set_number: str, version_number: str) -> int:
    """
    Clear all cached files for a specific set after PDF generation.
    Returns the number of files deleted.
    """
    try:
        # Compute the cache paths directly instead of calling
        # get_set_cache_dir(), which would re-create the very
        # directories we are about to remove.
        base_cache_dir = get_peeron_cache_dir()
        set_cache_key = f"{set_number}-{version_number}"
        full_cache_dir = base_cache_dir / 'full' / set_cache_key
        thumb_cache_dir = base_cache_dir / 'thumbs' / set_cache_key
        # Delete full images, then thumbnails
        deleted_count = _clear_jpg_directory(full_cache_dir, 'full image')
        deleted_count += _clear_jpg_directory(thumb_cache_dir, 'thumbnail')
        # NOTE(review): the original also tried to rmdir() the parent
        # directory when parent.name matched the set cache key, but the
        # parents here are always named 'full'/'thumbs', so that branch
        # could never execute and has been removed as dead code.
        logger.info(f"Set cache cleanup completed for {set_number}-{version_number}: {deleted_count} files deleted")
        return deleted_count
    except Exception as e:
        logger.error(f"Error during set cache cleanup for {set_number}-{version_number}: {e}")
        return 0
def clear_old_cache(max_age_days: int = 7) -> int:
    """
    Clear old cache files across all sets.

    A file is considered old when its mtime is older than
    `max_age_days`. Empty per-set directories are removed afterwards.
    Returns the number of files deleted.
    """
    try:
        base_cache_dir = get_peeron_cache_dir()
        if not base_cache_dir.exists():
            return 0
        # Files with an mtime before this cutoff get deleted
        cutoff = time.time() - max_age_days * 24 * 60 * 60
        deleted_count = 0
        # Clean both full and thumbs directories
        for cache_type in ('full', 'thumbs'):
            cache_type_dir = base_cache_dir / cache_type
            if not cache_type_dir.exists():
                continue
            for set_dir in cache_type_dir.iterdir():
                if not set_dir.is_dir():
                    continue
                for cache_file in set_dir.glob('*.jpg'):
                    # Keep files that are still fresh enough
                    if os.path.getmtime(cache_file) >= cutoff:
                        continue
                    try:
                        cache_file.unlink()
                        deleted_count += 1
                        logger.debug(f"Deleted old cache file: {cache_file}")
                    except OSError as e:
                        logger.warning(f"Failed to delete cache file {cache_file}: {e}")
                # Remove empty directories
                try:
                    if not any(set_dir.iterdir()):
                        set_dir.rmdir()
                except OSError:
                    pass
        logger.info(f"Old cache cleanup completed: {deleted_count} files deleted")
        return deleted_count
    except Exception as e:
        logger.error(f"Error during old cache cleanup: {e}")
        return 0
class PeeronPage(NamedTuple):
    """Represents a single instruction page from Peeron"""
    # Page identifier as it appears in the Peeron URL path segment
    page_number: str
    original_image_url: str  # Original Peeron full-size image URL
    cached_full_image_path: str  # Local full-size cached image path
    cached_thumbnail_url: str  # Local thumbnail URL for preview
    alt_text: str  # Alt text shown for the page image in templates
    rotation: int = 0  # Rotation in degrees (0, 90, 180, 270)
# Peeron instruction scraper
class PeeronInstructions(object):
    # Attribute declarations
    socket: 'BrickSocket | None'  # optional socket for progress reporting
    set_number: str  # set number without the version suffix
    version_number: str  # version suffix (defaults to '1')
    pages: list[PeeronPage]  # filled by find_pages()

    def __init__(
        self,
        set_number: str,
        version_number: str = '1',
        /,
        *,
        socket: 'BrickSocket | None' = None,
    ):
        """Normalize the set/version numbers and remember the socket."""
        # Save the socket
        self.socket = socket
        # Parse set number (handle both "4011" and "4011-1" formats)
        if '-' in set_number:
            parts = set_number.split('-', 1)
            self.set_number = parts[0]
            self.version_number = parts[1] if len(parts) > 1 else '1'
        else:
            self.set_number = set_number
            self.version_number = version_number
        # Placeholder for pages
        self.pages = []

    # Check if instructions exist on Peeron (lightweight)
    def exists(self, /) -> bool:
        """Check if the set exists on Peeron without caching thumbnails"""
        try:
            base_url = get_peeron_instruction_url(self.set_number, self.version_number)
            scraper = create_peeron_scraper()
            response = scraper.get(base_url)
            if response.status_code != 200:
                return False
            soup = BeautifulSoup(response.text, 'html.parser')
            # Check for "Browse instruction library" header (set not found)
            if soup.find('h1', string="Browse instruction library"):
                return False
            # Look for thumbnail images to confirm instructions exist
            # NOTE(review): this selector hard-codes the belay.peeron.com
            # prefix instead of using PEERON_THUMBNAIL_PATTERN like
            # find_pages() does — confirm whether that is intentional.
            thumbnails = soup.select('table[cellspacing="5"] a img[src^="http://belay.peeron.com/thumbs/"]')
            return len(thumbnails) > 0
        except Exception:
            # Any network/parse failure is treated as "does not exist"
            return False

    # Find all available instruction pages on Peeron
    def find_pages(self, /) -> list[PeeronPage]:
        """
        Scrape Peeron's HTML and return a list of available instruction pages.
        Similar to BrickInstructions.find_instructions() but for Peeron.

        Raises ErrorException on connection failure, missing set, or
        when no instruction pages are found.
        """
        base_url = get_peeron_instruction_url(self.set_number, self.version_number)
        thumb_base_url = get_peeron_thumbnail_url(self.set_number, self.version_number)
        scan_base_url = get_peeron_scan_url(self.set_number, self.version_number)
        logger.debug(f"[find_pages] fetching HTML from {base_url!r}")
        # Set up session with persistent cookies for Peeron (like working dl_peeron.py)
        scraper = create_peeron_scraper()
        # Download the main HTML page to establish session and cookies
        try:
            logger.debug(f"[find_pages] Establishing session by visiting: {base_url}")
            response = scraper.get(base_url)
            logger.debug(f"[find_pages] Main page visit: HTTP {response.status_code}")
            if response.status_code != 200:
                raise ErrorException(f'Failed to load Peeron page for {self.set_number}-{self.version_number}. HTTP {response.status_code}')
        except requests.exceptions.RequestException as e:
            raise ErrorException(f'Failed to connect to Peeron: {e}')
        # Parse HTML to locate instruction pages
        soup = BeautifulSoup(response.text, 'html.parser')
        # Check for "Browse instruction library" header (set not found)
        if soup.find('h1', string="Browse instruction library"):
            raise ErrorException(f'Set {self.set_number}-{self.version_number} not found on Peeron')
        # Locate all thumbnail images in the expected table structure
        # Use the configured thumbnail pattern to build the expected URL prefix
        # NOTE(review): thumb_base_url was already computed above; this
        # recomputation is redundant (but harmless).
        thumb_base_url = get_peeron_thumbnail_url(self.set_number, self.version_number)
        thumbnails = soup.select(f'table[cellspacing="5"] a img[src^="{thumb_base_url}"]')
        if not thumbnails:
            raise ErrorException(f'No instruction pages found for {self.set_number}-{self.version_number} on Peeron')
        pages: list[PeeronPage] = []
        total_thumbnails = len(thumbnails)
        # Initialize progress if socket is available
        if self.socket:
            self.socket.progress_total = total_thumbnails
            self.socket.progress_count = 0
            self.socket.progress(message=f"Starting to cache {total_thumbnails} full images")
        for idx, img in enumerate(thumbnails, 1):
            thumb_url = img['src']
            # Extract the page number from the thumbnail URL
            # (second-to-last path segment, e.g. .../thumbs/4011-1/<page>/x.jpg)
            page_number = thumb_url.split('/')[-2]
            # Build the full-size scan URL using the page number
            full_size_url = f"{scan_base_url}{page_number}/"
            logger.debug(f"[find_pages] Page {page_number}: thumb={thumb_url}, full_size={full_size_url}")
            # Create alt text for the page
            alt_text = f"LEGO Instructions {self.set_number}-{self.version_number} Page {page_number}"
            # Report progress if socket is available
            if self.socket:
                self.socket.progress_count = idx
                self.socket.progress(message=f"Caching full image {idx} of {total_thumbnails}")
            # Cache the full-size image and generate thumbnail preview using established session
            cached_full_path, cached_thumb_url = cache_full_image_and_generate_thumbnail(
                full_size_url, page_number, self.set_number, self.version_number, session=scraper
            )
            # Skip this page if caching failed
            if not cached_full_path or not cached_thumb_url:
                logger.warning(f"[find_pages] Skipping page {page_number} due to caching failure")
                continue
            page = PeeronPage(
                page_number=page_number,
                original_image_url=full_size_url,
                cached_full_image_path=cached_full_path,
                cached_thumbnail_url=cached_thumb_url,
                alt_text=alt_text
            )
            pages.append(page)
        # Cache the pages for later use
        self.pages = pages
        logger.debug(f"[find_pages] found {len(pages)} pages for {self.set_number}-{self.version_number}")
        return pages

    # Find instructions with fallback to Peeron
    @staticmethod
    def find_instructions_with_peeron_fallback(set: str, /) -> tuple[list[tuple[str, str]], list[PeeronPage] | None]:
        """
        Enhanced version of BrickInstructions.find_instructions() that falls back to Peeron.
        Returns (rebrickable_instructions, peeron_pages).
        If rebrickable_instructions is empty, peeron_pages will contain Peeron data.
        """
        # Local import — presumably avoids a circular import; verify
        from .instructions import BrickInstructions
        # First try Rebrickable
        try:
            rebrickable_instructions = BrickInstructions.find_instructions(set)
            return rebrickable_instructions, None
        except ErrorException as e:
            logger.info(f"Rebrickable failed for {set}: {e}. Trying Peeron fallback...")
            # Fallback to Peeron
            try:
                peeron = PeeronInstructions(set)
                peeron_pages = peeron.find_pages()
                return [], peeron_pages
            except ErrorException as peeron_error:
                # Both failed, re-raise original Rebrickable error
                logger.info(f"Peeron also failed for {set}: {peeron_error}")
                raise e from peeron_error

204
bricktracker/peeron_pdf.py Normal file
View File

@@ -0,0 +1,204 @@
import logging
import os
import tempfile
import time
from typing import Any, TYPE_CHECKING
from flask import current_app
from PIL import Image
from .exceptions import DownloadException, ErrorException
from .instructions import BrickInstructions
from .peeron_instructions import PeeronPage, get_min_image_size, get_peeron_download_delay, get_peeron_instruction_url, create_peeron_scraper
if TYPE_CHECKING:
from .socket import BrickSocket
logger = logging.getLogger(__name__)
# PDF generator for Peeron instruction pages
class PeeronPDF(object):
    # Attribute declarations
    socket: 'BrickSocket'  # socket used for progress/complete/fail reporting
    set_number: str
    version_number: str
    pages: list[PeeronPage]  # pages whose images were already cached
    filename: str  # output PDF filename

    def __init__(
        self,
        set_number: str,
        version_number: str,
        pages: list[PeeronPage],
        /,
        *,
        socket: 'BrickSocket',
    ):
        """Store the socket, set identity and pages; derive the PDF filename."""
        # Save the socket
        self.socket = socket
        # Save set information
        self.set_number = set_number
        self.version_number = version_number
        self.pages = pages
        # Generate filename following BrickTracker conventions
        self.filename = f"{set_number}-{version_number}_peeron.pdf"

    # Download pages and create PDF
    def create_pdf(self, /) -> None:
        """
        Downloads selected Peeron pages and merges them into a PDF.
        Uses progress updates via socket similar to BrickInstructions.download()

        On success the per-set image cache is cleared; on any failure the
        socket is notified via fail() instead of raising.
        """
        try:
            target_path = self._get_target_path()
            # Skip if we already have it
            if os.path.isfile(target_path):
                # Create BrickInstructions instance to get PDF URL
                instructions = BrickInstructions(self.filename)
                pdf_url = instructions.url()
                return self.socket.complete(
                    message=f'File {self.filename} already exists, skipped - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
                )
            # Set up progress tracking
            total_pages = len(self.pages)
            self.socket.update_total(total_pages)
            self.socket.progress_count = 0
            self.socket.progress(message=f"Starting PDF creation from {total_pages} cached pages")
            # Use cached images directly - no downloads needed!
            cached_files_with_rotation = []
            missing_pages = []
            for i, page in enumerate(self.pages):
                # Check if cached file exists
                if os.path.isfile(page.cached_full_image_path):
                    cached_files_with_rotation.append((page.cached_full_image_path, page.rotation))
                    # Update progress
                    self.socket.progress_count += 1
                    self.socket.progress(
                        message=f"Processing cached page {page.page_number} ({i + 1}/{total_pages})"
                    )
                else:
                    missing_pages.append(page.page_number)
                    logger.warning(f"Cached image missing for page {page.page_number}: {page.cached_full_image_path}")
            if not cached_files_with_rotation:
                raise DownloadException(f"No cached images available for set {self.set_number}-{self.version_number}. Cache may have been cleared.")
            elif len(cached_files_with_rotation) < total_pages:
                # Partial success: log a warning but still build the PDF
                error_msg = f"Only found {len(cached_files_with_rotation)}/{total_pages} cached images."
                if missing_pages:
                    error_msg += f" Missing pages: {', '.join(missing_pages)}."
                logger.warning(error_msg)
            # Create PDF from cached images with rotation
            self._create_pdf_from_images(cached_files_with_rotation, target_path)
            # Success
            logger.info(f"Created PDF {self.filename} with {len(cached_files_with_rotation)} pages")
            # Create BrickInstructions instance to get PDF URL
            instructions = BrickInstructions(self.filename)
            pdf_url = instructions.url()
            self.socket.complete(
                message=f'PDF {self.filename} created with {len(cached_files_with_rotation)} pages - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
            )
            # Clean up set cache after successful PDF creation
            # (best-effort: a cleanup failure only logs a warning)
            try:
                from .peeron_instructions import clear_set_cache
                deleted_count = clear_set_cache(self.set_number, self.version_number)
                if deleted_count > 0:
                    logger.info(f"[create_pdf] Cleaned up {deleted_count} cache files for set {self.set_number}-{self.version_number}")
            except Exception as e:
                logger.warning(f"[create_pdf] Failed to clean set cache: {e}")
        except Exception as e:
            logger.error(f"Error creating PDF {self.filename}: {e}")
            self.socket.fail(
                message=f"Error creating PDF {self.filename}: {e}"
            )

    # Create PDF from downloaded images
    def _create_pdf_from_images(self, image_paths_and_rotations: list[tuple[str, int]], output_path: str, /) -> None:
        """Create a PDF from a list of image files with their rotations"""
        try:
            # Import FPDF (should be available from requirements)
            from fpdf import FPDF
        except ImportError:
            raise ErrorException("FPDF library not available. Install with: pip install fpdf2")
        pdf = FPDF()
        for i, (img_path, rotation) in enumerate(image_paths_and_rotations):
            try:
                # Open image and apply rotation if needed
                with Image.open(img_path) as image:
                    # Apply rotation if specified
                    if rotation != 0:
                        # PIL rotation is counter-clockwise, so we negate for clockwise rotation
                        image = image.rotate(-rotation, expand=True)
                    width, height = image.size
                    # Add page with image dimensions (convert pixels to mm)
                    # 1 pixel = 0.264583 mm (assuming 96 DPI)
                    page_width = width * 0.264583
                    page_height = height * 0.264583
                    pdf.add_page(format=(page_width, page_height))
                    # Save rotated image to temporary file for FPDF
                    temp_rotated_path = None
                    if rotation != 0:
                        # NOTE(review): shadows the module-level tempfile
                        # import; harmless but redundant
                        import tempfile
                        temp_fd, temp_rotated_path = tempfile.mkstemp(suffix='.jpg', prefix=f'peeron_rotated_{i}_')
                        try:
                            os.close(temp_fd)  # Close file descriptor, we'll use the path
                            image.save(temp_rotated_path, 'JPEG', quality=95)
                            pdf.image(temp_rotated_path, x=0, y=0, w=page_width, h=page_height)
                        finally:
                            # Clean up rotated temp file
                            if temp_rotated_path and os.path.exists(temp_rotated_path):
                                os.remove(temp_rotated_path)
                    else:
                        pdf.image(img_path, x=0, y=0, w=page_width, h=page_height)
                # Update progress
                progress_msg = f"Processing page {i + 1}/{len(image_paths_and_rotations)} into PDF"
                if rotation != 0:
                    progress_msg += f" (rotated {rotation}°)"
                self.socket.progress(message=progress_msg)
            except Exception as e:
                # Skip unreadable/broken images instead of aborting the PDF
                logger.warning(f"Failed to add image {img_path} to PDF: {e}")
                continue
        # Save the PDF
        pdf.output(output_path)

    # Get target file path
    def _get_target_path(self, /) -> str:
        """Get the full path where the PDF should be saved"""
        folder = current_app.config['INSTRUCTIONS_FOLDER']
        # If folder is absolute, use it directly
        # Otherwise, make it relative to app root (not static folder)
        if os.path.isabs(folder):
            instructions_folder = folder
        else:
            instructions_folder = os.path.join(current_app.root_path, folder)
        return os.path.join(instructions_folder, self.filename)

    # Create BrickInstructions instance for the generated PDF
    def get_instructions(self, /) -> BrickInstructions:
        """Return a BrickInstructions instance for the generated PDF"""
        return BrickInstructions(self.filename)

View File

@@ -53,6 +53,23 @@ class RebrickableImage(object):
if os.path.exists(path): if os.path.exists(path):
return return
# Check if the original image field is null - copy nil placeholder instead
if self.part is not None and self.part.fields.image is None:
return
if self.minifigure is not None and self.minifigure.fields.image is None:
return
if self.set.fields.image is None:
# Copy nil.png from parts folder to sets folder with set number as filename
parts_folder = current_app.config['PARTS_FOLDER']
if not os.path.isabs(parts_folder):
parts_folder = os.path.join(current_app.root_path, parts_folder)
nil_source = os.path.join(parts_folder, f"{RebrickableImage.nil_name()}.{self.extension}")
if os.path.exists(nil_source):
import shutil
shutil.copy2(nil_source, path)
return
url = self.url() url = self.url()
if url is None: if url is None:
return return
@@ -96,9 +113,16 @@ class RebrickableImage(object):
# Return the path depending on the objects provided # Return the path depending on the objects provided
def path(self, /) -> str: def path(self, /) -> str:
folder = self.folder()
# If folder is an absolute path (starts with /), use it directly
# Otherwise, make it relative to app root (current_app.root_path)
if folder.startswith('/'):
base_path = folder
else:
base_path = os.path.join(current_app.root_path, folder)
return os.path.join( return os.path.join(
current_app.static_folder, # type: ignore base_path,
self.folder(),
'{id}.{ext}'.format(id=self.id(), ext=self.extension), '{id}.{ext}'.format(id=self.id(), ext=self.extension),
) )
@@ -116,7 +140,11 @@ class RebrickableImage(object):
else: else:
return self.minifigure.fields.image return self.minifigure.fields.image
return self.set.fields.image # Handle set images - use nil placeholder if image is null
if self.set.fields.image is None:
return current_app.config['REBRICKABLE_IMAGE_NIL']
else:
return self.set.fields.image
# Return the name of the nil image file # Return the name of the nil image file
@staticmethod @staticmethod
@@ -152,10 +180,21 @@ class RebrickableImage(object):
# _, extension = os.path.splitext(self.part_img_url) # _, extension = os.path.splitext(self.part_img_url)
extension = '.jpg' extension = '.jpg'
# Compute the path # Determine which route to use based on folder path
path = os.path.join(folder, '{name}{ext}'.format( # If folder contains 'data' (new structure), use data route
name=name, # Otherwise use static route (legacy - relative paths like 'parts', 'sets')
ext=extension, if 'data' in folder:
)) # Extract the folder type from the folder_name config key
# E.g., 'PARTS_FOLDER' -> 'parts', 'SETS_FOLDER' -> 'sets'
return url_for('static', filename=path) folder_type = folder_name.replace('_FOLDER', '').lower()
filename = '{name}{ext}'.format(name=name, ext=extension)
return url_for('data.serve_data_file', folder=folder_type, filename=filename)
else:
# Legacy: folder is relative to static/ (e.g., 'parts' or 'static/parts')
# Strip 'static/' prefix if present to avoid double /static/ in URL
folder_clean = folder.removeprefix('static/')
path = os.path.join(folder_clean, '{name}{ext}'.format(
name=name,
ext=extension,
))
return url_for('static', filename=path)

View File

@@ -98,7 +98,7 @@ class RebrickablePart(BrickRecord):
# Use BrickLink color ID if available and not None, otherwise fall back to Rebrickable color # Use BrickLink color ID if available and not None, otherwise fall back to Rebrickable color
bricklink_color = getattr(self.fields, 'bricklink_color_id', None) bricklink_color = getattr(self.fields, 'bricklink_color_id', None)
color_param = bricklink_color if bricklink_color is not None else self.fields.color color_param = bricklink_color if bricklink_color is not None else self.fields.color
print(f'BrickLink URL parameters: part={part_param}, color={color_param}') # Debugging line, can be removed later # print(f'BrickLink URL parameters: part={part_param}, color={color_param}') # Debugging line, can be removed later
return current_app.config['BRICKLINK_LINK_PART_PATTERN'].format( # noqa: E501 return current_app.config['BRICKLINK_LINK_PART_PATTERN'].format( # noqa: E501
part=part_param, part=part_param,
color=color_param, color=color_param,

View File

@@ -155,9 +155,18 @@ class RebrickableSet(BrickRecord):
# Return a short form of the Rebrickable set # Return a short form of the Rebrickable set
def short(self, /, *, from_download: bool = False) -> dict[str, Any]: def short(self, /, *, from_download: bool = False) -> dict[str, Any]:
# Use nil image URL if set image is null
image_url = self.fields.image
if image_url is None:
# Return path to nil.png from parts folder
image_url = RebrickableImage.static_url(
RebrickableImage.nil_name(),
'PARTS_FOLDER'
)
return { return {
'download': from_download, 'download': from_download,
'image': self.fields.image, 'image': image_url,
'name': self.fields.name, 'name': self.fields.name,
'set': self.fields.set, 'set': self.fields.set,
} }
@@ -179,6 +188,15 @@ class RebrickableSet(BrickRecord):
return '' return ''
# Compute the url for the bricklink page
def url_for_bricklink(self, /) -> str:
if current_app.config['BRICKLINK_LINKS']:
return current_app.config['BRICKLINK_LINK_SET_PATTERN'].format(
set_num=self.fields.set
)
return ''
# Compute the url for the refresh button # Compute the url for the refresh button
def url_for_refresh(self, /) -> str: def url_for_refresh(self, /) -> str:
return url_for('set.refresh', set=self.fields.set) return url_for('set.refresh', set=self.fields.set)
@@ -187,17 +205,18 @@ class RebrickableSet(BrickRecord):
@staticmethod @staticmethod
def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]: def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]:
# Extracting version and number # Extracting version and number
# Note: number can be alphanumeric (e.g., "McDR6US", "COMCON035")
number, _, version = str(data['set_num']).partition('-') number, _, version = str(data['set_num']).partition('-')
return { return {
'set': str(data['set_num']), 'set': str(data['set_num']),
'number': int(number), 'number': str(number), # Keep as string to support alphanumeric sets
'version': int(version), 'version': int(version),
'name': str(data['name']), 'name': str(data['name']),
'year': int(data['year']), 'year': int(data['year']),
'theme_id': int(data['theme_id']), 'theme_id': int(data['theme_id']),
'number_of_parts': int(data['num_parts']), 'number_of_parts': int(data['num_parts']),
'image': str(data['set_img_url']), 'image': str(data['set_img_url']) if data['set_img_url'] is not None else None,
'url': str(data['set_url']), 'url': str(data['set_url']),
'last_modified': str(data['last_modified_dt']), 'last_modified': str(data['last_modified_dt']),
} }

View File

@@ -11,24 +11,19 @@ class RebrickableSetList(BrickRecordList[RebrickableSet]):
select_query: str = 'rebrickable/set/list' select_query: str = 'rebrickable/set/list'
refresh_query: str = 'rebrickable/set/need_refresh' refresh_query: str = 'rebrickable/set/need_refresh'
# All the sets # Implementation of abstract list method
def all(self, /) -> Self: def list(self, /, *, override_query: str | None = None, **context) -> None:
# Load the sets from the database # Load the sets from the database
for record in self.select(): for record in self.select(override_query=override_query, **context):
rebrickable_set = RebrickableSet(record=record) rebrickable_set = RebrickableSet(record=record)
self.records.append(rebrickable_set) self.records.append(rebrickable_set)
# All the sets
def all(self, /) -> Self:
self.list()
return self return self
# Sets needing refresh # Sets needing refresh
def need_refresh(self, /) -> Self: def need_refresh(self, /) -> Self:
# Load the sets from the database self.list(override_query=self.refresh_query)
for record in self.select(
override_query=self.refresh_query
):
rebrickable_set = RebrickableSet(record=record)
self.records.append(rebrickable_set)
return self return self

View File

@@ -1,5 +1,6 @@
import re
from sqlite3 import Row from sqlite3 import Row
from typing import Any, Generator, Generic, ItemsView, TypeVar, TYPE_CHECKING from typing import Any, Generator, Generic, ItemsView, Self, TypeVar, TYPE_CHECKING
from .fields import BrickRecordFields from .fields import BrickRecordFields
from .sql import BrickSQL from .sql import BrickSQL
@@ -72,6 +73,90 @@ class BrickRecordList(Generic[T]):
**context **context
) )
# Generic pagination method for all record lists
def paginate(
self,
page: int = 1,
per_page: int = 50,
sort_field: str | None = None,
sort_order: str = 'asc',
count_query: str | None = None,
list_query: str | None = None,
field_mapping: dict[str, str] | None = None,
**filter_context: Any
) -> tuple['Self', int]:
"""Generic pagination implementation for all record lists"""
from .sql import BrickSQL
# Use provided queries or fall back to defaults
list_query = list_query or getattr(self, 'all_query', None)
if not list_query:
raise NotImplementedError("Subclass must define all_query")
# Calculate offset
offset = (page - 1) * per_page
# Get total count by wrapping the main query
if count_query:
# Use provided count query
count_result = BrickSQL().fetchone(count_query, **filter_context)
total_count = count_result['total_count'] if count_result else 0
else:
# Generate count by wrapping the main query (without ORDER BY, LIMIT, OFFSET)
count_context = {k: v for k, v in filter_context.items()
if k not in ['order', 'limit', 'offset']}
# Get the main query SQL without pagination clauses
main_sql = BrickSQL().load_query(list_query, **count_context)
# Remove ORDER BY, LIMIT, OFFSET clauses for counting
# Remove ORDER BY clause and everything after it that's not part of subqueries
count_sql = re.sub(r'\s+ORDER\s+BY\s+[^)]*?(\s+LIMIT|\s+OFFSET|$)', r'\1', main_sql, flags=re.IGNORECASE)
# Remove LIMIT and OFFSET
count_sql = re.sub(r'\s+LIMIT\s+\d+', '', count_sql, flags=re.IGNORECASE)
count_sql = re.sub(r'\s+OFFSET\s+\d+', '', count_sql, flags=re.IGNORECASE)
# Wrap in COUNT(*)
wrapped_sql = f"SELECT COUNT(*) as total_count FROM ({count_sql.strip()})"
count_result = BrickSQL().raw_execute(wrapped_sql, {}).fetchone()
total_count = count_result['total_count'] if count_result else 0
# Prepare sort order
order_clause = None
if sort_field and field_mapping and sort_field in field_mapping:
sql_field = field_mapping[sort_field]
direction = 'DESC' if sort_order.lower() == 'desc' else 'ASC'
order_clause = f'{sql_field} {direction}'
# Build pagination context
pagination_context = {
'limit': per_page,
'offset': offset,
'order': order_clause or getattr(self, 'order', None),
**filter_context
}
# Load paginated results using the existing list() method
# Check if this is a set list that needs do_theme parameter
if hasattr(self, 'themes'): # Only BrickSetList has this attribute
self.list(override_query=list_query, do_theme=True, **pagination_context)
else:
self.list(override_query=list_query, **pagination_context)
return self, total_count
# Base method that subclasses can override
def list(
self,
/,
*,
override_query: str | None = None,
**context: Any,
) -> None:
"""Load records from database - should be implemented by subclasses that use pagination"""
raise NotImplementedError("Subclass must implement list() method")
# Generic SQL parameters from fields # Generic SQL parameters from fields
def sql_parameters(self, /) -> dict[str, Any]: def sql_parameters(self, /) -> dict[str, Any]:
parameters: dict[str, Any] = {} parameters: dict[str, Any] = {}

View File

@@ -59,6 +59,10 @@ class BrickSet(RebrickableSet):
# Generate an UUID for self # Generate an UUID for self
self.fields.id = str(uuid4()) self.fields.id = str(uuid4())
# Insert the rebrickable set into database FIRST
# This must happen before inserting bricktracker_sets due to FK constraint
self.insert_rebrickable()
if not refresh: if not refresh:
# Save the storage # Save the storage
storage = BrickSetStorageList.get( storage = BrickSetStorageList.get(
@@ -74,7 +78,8 @@ class BrickSet(RebrickableSet):
) )
self.fields.purchase_location = purchase_location.fields.id self.fields.purchase_location = purchase_location.fields.id
# Insert into database # Insert into database (deferred - will execute at final commit)
# All operations are atomic - if anything fails, nothing is committed
self.insert(commit=False) self.insert(commit=False)
# Save the owners # Save the owners
@@ -91,9 +96,6 @@ class BrickSet(RebrickableSet):
tag = BrickSetTagList.get(id) tag = BrickSetTagList.get(id)
tag.update_set_state(self, state=True) tag.update_set_state(self, state=True)
# Insert the rebrickable set into database
self.insert_rebrickable()
# Load the inventory # Load the inventory
if not BrickPartList.download(socket, self, refresh=refresh): if not BrickPartList.download(socket, self, refresh=refresh):
return False return False
@@ -169,6 +171,20 @@ class BrickSet(RebrickableSet):
else: else:
return '' return ''
# Purchase date max formatted for consolidated sets
def purchase_date_max_formatted(self, /, *, standard: bool = False) -> str:
    """Format the consolidated maximum purchase date for display.

    Args:
        standard: when True, use the fixed '%Y/%m/%d' layout instead of
            the application's configured PURCHASE_DATE_FORMAT.

    Returns:
        The formatted date, or an empty string when no maximum purchase
        date is available on this record.
    """
    timestamp = getattr(self.fields, 'purchase_date_max', None)
    if timestamp is None:
        return ''
    moment = datetime.fromtimestamp(timestamp)
    if standard:
        return moment.strftime('%Y/%m/%d')
    return moment.strftime(
        current_app.config['PURCHASE_DATE_FORMAT']
    )
# Purchase price with currency # Purchase price with currency
def purchase_price(self, /) -> str: def purchase_price(self, /) -> str:
if self.fields.purchase_price is not None: if self.fields.purchase_price is not None:

View File

@@ -13,14 +13,19 @@ from .set_storage_list import BrickSetStorageList
from .set_tag import BrickSetTag from .set_tag import BrickSetTag
from .set_tag_list import BrickSetTagList from .set_tag_list import BrickSetTagList
from .set import BrickSet from .set import BrickSet
from .theme_list import BrickThemeList
from .instructions_list import BrickInstructionsList
# All the sets from the database # All the sets from the database
class BrickSetList(BrickRecordList[BrickSet]): class BrickSetList(BrickRecordList[BrickSet]):
themes: list[str] themes: list[str]
years: list[int]
order: str order: str
# Queries # Queries
all_query: str = 'set/list/all'
consolidated_query: str = 'set/list/consolidated'
damaged_minifigure_query: str = 'set/list/damaged_minifigure' damaged_minifigure_query: str = 'set/list/damaged_minifigure'
damaged_part_query: str = 'set/list/damaged_part' damaged_part_query: str = 'set/list/damaged_part'
generic_query: str = 'set/list/generic' generic_query: str = 'set/list/generic'
@@ -37,17 +42,510 @@ class BrickSetList(BrickRecordList[BrickSet]):
# Placeholders # Placeholders
self.themes = [] self.themes = []
self.years = []
# Store the order for this list # Store the order for this list
self.order = current_app.config['SETS_DEFAULT_ORDER'] self.order = current_app.config['SETS_DEFAULT_ORDER']
# All the sets # All the sets
def all(self, /) -> Self: def all(self, /) -> Self:
# Load the sets from the database # Load the sets from the database with metadata context for filtering
self.list(do_theme=True) filter_context = {
'owners': BrickSetOwnerList.as_columns(),
'statuses': BrickSetStatusList.as_columns(),
'tags': BrickSetTagList.as_columns(),
}
self.list(do_theme=True, **filter_context)
return self return self
# All sets in consolidated/grouped view
def all_consolidated(self, /) -> Self:
    """Load every set using the consolidated (grouped) query.

    Builds the owner/status/tag column mappings needed by the
    consolidated SQL template, then delegates to list() with theme
    aggregation enabled. Returns self for chaining.
    """
    # Load the sets from the database using consolidated query with metadata
    filter_context = {
        'owners_dict': BrickSetOwnerList.as_column_mapping(),
        'statuses_dict': BrickSetStatusList.as_column_mapping(),
        'tags_dict': BrickSetTagList.as_column_mapping(),
    }
    self.list(override_query=self.consolidated_query, do_theme=True, **filter_context)
    return self
# All sets with pagination and filtering
def all_filtered_paginated(
    self,
    search_query: str | None = None,
    page: int = 1,
    per_page: int = 50,
    sort_field: str | None = None,
    sort_order: str = 'asc',
    status_filter: str | None = None,
    theme_filter: str | None = None,
    owner_filter: str | None = None,
    purchase_location_filter: str | None = None,
    storage_filter: str | None = None,
    tag_filter: str | None = None,
    year_filter: str | None = None,
    duplicate_filter: bool = False,
    use_consolidated: bool = True
) -> tuple[Self, int]:
    """Load one page of sets, filtered and sorted on the SQL side.

    Chooses between the consolidated and flat queries, delegates the
    instructions-based status filter to a Python-side path, and fills
    the themes/years dropdown data from the filtered dataset.

    Args:
        search_query: free-text search passed through to the SQL template.
        page / per_page: 1-based page number and page size.
        sort_field: UI sort key, translated via a field mapping below.
        sort_order: 'asc' or 'desc' (anything else is treated as 'asc').
        theme_filter: theme name or numeric ID; converted to an ID here.
        use_consolidated: prefer the grouped query when no owner/tag
            filter forces the flat one.

    Returns:
        (result_list, total_count) where total_count spans all pages.
    """
    # Convert theme name to theme ID for filtering
    theme_id_filter = None
    if theme_filter:
        theme_id_filter = self._theme_name_to_id(theme_filter)
    # Check if any filters are applied
    # NOTE(review): has_filters is never read again in this method —
    # confirm whether it is dead code that can be removed.
    has_filters = any([status_filter, theme_id_filter, owner_filter, purchase_location_filter, storage_filter, tag_filter, year_filter, duplicate_filter])
    # Prepare filter context (forwarded as parameters to the SQL templates)
    filter_context = {
        'search_query': search_query,
        'status_filter': status_filter,
        'theme_filter': theme_id_filter,  # Use converted theme ID
        'owner_filter': owner_filter,
        'purchase_location_filter': purchase_location_filter,
        'storage_filter': storage_filter,
        'tag_filter': tag_filter,
        'year_filter': year_filter,
        'duplicate_filter': duplicate_filter,
        'owners': BrickSetOwnerList.as_columns(),
        'statuses': BrickSetStatusList.as_columns(),
        'tags': BrickSetTagList.as_columns(),
        'owners_dict': BrickSetOwnerList.as_column_mapping(),
        'statuses_dict': BrickSetStatusList.as_column_mapping(),
        'tags_dict': BrickSetTagList.as_column_mapping(),
    }
    # Field mapping for sorting: UI sort keys -> SQL column expressions.
    # The two mappings differ because the consolidated query exposes
    # aggregated aliases while the flat query addresses raw columns.
    if use_consolidated:
        field_mapping = {
            'set': '"rebrickable_sets"."number", "rebrickable_sets"."version"',
            'name': '"rebrickable_sets"."name"',
            'year': '"rebrickable_sets"."year"',
            'parts': '"rebrickable_sets"."number_of_parts"',
            'theme': '"rebrickable_sets"."theme_id"',
            'minifigures': '"total_minifigures"',
            'missing': '"total_missing"',
            'damaged': '"total_damaged"',
            'instances': '"instance_count"',  # New field for consolidated view
            'purchase-date': '"purchase_date"',  # Use the MIN aggregated value
            'purchase-price': '"purchase_price"'  # Use the MIN aggregated value
        }
    else:
        field_mapping = {
            'set': '"rebrickable_sets"."number", "rebrickable_sets"."version"',
            'name': '"rebrickable_sets"."name"',
            'year': '"rebrickable_sets"."year"',
            'parts': '"rebrickable_sets"."number_of_parts"',
            'theme': '"rebrickable_sets"."theme_id"',
            'minifigures': '"total_minifigures"',  # Use the alias from the SQL query
            'missing': '"total_missing"',  # Use the alias from the SQL query
            'damaged': '"total_damaged"',  # Use the alias from the SQL query
            'purchase-date': '"bricktracker_sets"."purchase_date"',
            'purchase-price': '"bricktracker_sets"."purchase_price"'
        }
    # Choose query based on consolidation preference and filter complexity
    # Owner/tag filters still need to fall back to non-consolidated for now
    # due to complex aggregation requirements
    complex_filters = [owner_filter, tag_filter]
    if use_consolidated and not any(complex_filters):
        query_to_use = self.consolidated_query
    else:
        # Use filtered query when consolidation is disabled or complex filters applied
        query_to_use = 'set/list/all_filtered'
    # Handle instructions filtering (instructions live on disk, not in
    # SQL, so this filter cannot be pushed down to the database)
    if status_filter in ['has-missing-instructions', '-has-missing-instructions']:
        # For instructions filter, we need to load all sets first, then filter and paginate
        return self._all_filtered_paginated_with_instructions(
            search_query, page, per_page, sort_field, sort_order,
            status_filter, theme_id_filter, owner_filter,
            purchase_location_filter, storage_filter, tag_filter
        )
    # Handle special case for set sorting with multiple columns
    if sort_field == 'set' and field_mapping:
        # Create custom order clause for set sorting
        direction = 'DESC' if sort_order.lower() == 'desc' else 'ASC'
        custom_order = f'"rebrickable_sets"."number" {direction}, "rebrickable_sets"."version" {direction}'
        filter_context['order'] = custom_order
        # Remove set from field mapping to avoid double-processing
        field_mapping_copy = field_mapping.copy()
        field_mapping_copy.pop('set', None)
        field_mapping = field_mapping_copy
        sort_field = None  # Disable automatic ORDER BY construction
    # Normal SQL-based filtering and pagination
    result, total_count = self.paginate(
        page=page,
        per_page=per_page,
        sort_field=sort_field,
        sort_order=sort_order,
        list_query=query_to_use,
        field_mapping=field_mapping,
        **filter_context
    )
    # Populate themes and years for filter dropdown from filtered dataset (not just current page)
    # For themes dropdown, exclude theme_filter to show ALL available themes
    themes_context = filter_context.copy()
    themes_context.pop('theme_filter', None)
    result._populate_themes_from_filtered_dataset(
        query_to_use,
        **themes_context
    )
    # For years dropdown, exclude ALL filters to show ALL available years
    years_context = {
        'search_query': filter_context.get('search_query'),
    }
    result._populate_years_from_filtered_dataset(
        query_to_use,
        **years_context
    )
    return result, total_count
def _populate_themes(self) -> None:
"""Populate themes list from the current records"""
themes = set()
for record in self.records:
if hasattr(record, 'theme') and hasattr(record.theme, 'name'):
themes.add(record.theme.name)
self.themes = list(themes)
self.themes.sort()
def _populate_years(self) -> None:
"""Populate years list from the current records"""
years = set()
for record in self.records:
if hasattr(record, 'fields') and hasattr(record.fields, 'year') and record.fields.year:
years.add(record.fields.year)
self.years = list(years)
self.years.sort(reverse=True) # Most recent years first
def _theme_name_to_id(self, theme_name_or_id: str) -> str | None:
    """Convert a theme name or ID to theme ID for filtering.

    Accepts either a numeric ID (validated against the theme list) or a
    theme name (case-insensitive). When several theme IDs share the same
    name, prefers the one that actually has sets in the collection.
    Returns the ID as a string, or None when no match is found or the
    theme list cannot be loaded.
    """
    try:
        # Check if the input is already a numeric theme ID
        if theme_name_or_id.isdigit():
            # Input is already a theme ID, validate it exists
            theme_list = BrickThemeList()
            theme_id = int(theme_name_or_id)
            if theme_id in theme_list.themes:
                return str(theme_id)
            else:
                return None
        # Input is a theme name, convert to ID
        from .sql import BrickSQL
        theme_list = BrickThemeList()
        # Find all theme IDs that match the name
        matching_theme_ids = []
        for theme_id, theme in theme_list.themes.items():
            if theme.name.lower() == theme_name_or_id.lower():
                matching_theme_ids.append(str(theme_id))
        if not matching_theme_ids:
            return None
        # If only one match, return it
        if len(matching_theme_ids) == 1:
            return matching_theme_ids[0]
        # Multiple matches - check which theme ID actually has sets in the user's collection
        sql = BrickSQL()
        for theme_id in matching_theme_ids:
            result = sql.fetchone(
                'set/check_theme_exists',
                theme_id=theme_id
            )
            count = result['count'] if result else 0
            if count > 0:
                return theme_id
        # If none have sets, return the first match (fallback)
        return matching_theme_ids[0]
    except Exception:
        # If themes can't be loaded, return None to disable theme filtering
        return None
def _theme_id_to_name(self, theme_id: str) -> str | None:
"""Convert a theme ID to theme name (lowercase) for dropdown display"""
try:
if not theme_id or not theme_id.isdigit():
return None
from .theme_list import BrickThemeList
theme_list = BrickThemeList()
theme_id_int = int(theme_id)
if theme_id_int in theme_list.themes:
return theme_list.themes[theme_id_int].name.lower()
return None
except Exception as e:
# For debugging - log the exception
import logging
logger = logging.getLogger(__name__)
logger.warning(f"Failed to convert theme ID {theme_id} to name: {e}")
return None
def _all_filtered_paginated_with_instructions(
    self,
    search_query: str | None,
    page: int,
    per_page: int,
    sort_field: str | None,
    sort_order: str,
    status_filter: str,
    theme_id_filter: str | None,
    owner_filter: str | None,
    purchase_location_filter: str | None,
    storage_filter: str | None,
    tag_filter: str | None
) -> tuple[Self, int]:
    """Handle filtering when instructions filter is involved.

    Instructions availability is not stored in SQL, so this path loads
    every set, applies all filters in Python, then slices out the
    requested page. Returns (page_of_sets, total_filtered_count).

    NOTE(review): any exception here silently falls back to the normal
    SQL path with status_filter=None, dropping the instructions filter
    — confirm that is the intended degradation.
    """
    try:
        # Load all sets first (without pagination) with full metadata
        all_sets = BrickSetList()
        filter_context = {
            'owners': BrickSetOwnerList.as_columns(),
            'statuses': BrickSetStatusList.as_columns(),
            'tags': BrickSetTagList.as_columns(),
        }
        all_sets.list(do_theme=True, **filter_context)
        # Load instructions list
        instructions_list = BrickInstructionsList()
        instruction_sets = set(instructions_list.sets.keys())
        # Apply all filters manually
        filtered_records = []
        for record in all_sets.records:
            # Apply instructions filter
            set_id = record.fields.set
            has_instructions = set_id in instruction_sets
            if status_filter == 'has-missing-instructions' and has_instructions:
                continue  # Skip sets that have instructions
            elif status_filter == '-has-missing-instructions' and not has_instructions:
                continue  # Skip sets that don't have instructions
            # Apply other filters manually
            if search_query and not self._matches_search(record, search_query):
                continue
            if theme_id_filter and not self._matches_theme(record, theme_id_filter):
                continue
            if owner_filter and not self._matches_owner(record, owner_filter):
                continue
            if purchase_location_filter and not self._matches_purchase_location(record, purchase_location_filter):
                continue
            if storage_filter and not self._matches_storage(record, storage_filter):
                continue
            if tag_filter and not self._matches_tag(record, tag_filter):
                continue
            filtered_records.append(record)
        # Apply sorting
        if sort_field:
            filtered_records = self._sort_records(filtered_records, sort_field, sort_order)
        # Calculate pagination (page is 1-based)
        total_count = len(filtered_records)
        start_index = (page - 1) * per_page
        end_index = start_index + per_page
        paginated_records = filtered_records[start_index:end_index]
        # Create result
        result = BrickSetList()
        result.records = paginated_records
        # Copy themes and years from the source that has all sets
        result.themes = all_sets.themes if hasattr(all_sets, 'themes') else []
        result.years = all_sets.years if hasattr(all_sets, 'years') else []
        # If themes or years weren't populated, populate them from current records
        if not result.themes:
            result._populate_themes()
        if not result.years:
            result._populate_years()
        return result, total_count
    except Exception:
        # Fall back to normal pagination without instructions filter
        return self.all_filtered_paginated(
            search_query, page, per_page, sort_field, sort_order,
            None, theme_id_filter, owner_filter,
            purchase_location_filter, storage_filter, tag_filter
        )
def _populate_years_from_filtered_dataset(self, query_name: str, **filter_context) -> None:
    """Populate years list from all available records in filtered dataset.

    Runs the lightweight 'set/list/years_only' query (pagination keys
    stripped) so the years dropdown reflects the whole filtered dataset
    rather than the current page. Falls back to the current page's years
    on any failure.

    NOTE(review): query_name is currently unused — confirm whether it
    was meant to be forwarded to select().
    """
    try:
        # Use a simplified query to get just distinct years
        years_context = dict(filter_context)
        years_context.pop('limit', None)
        years_context.pop('offset', None)
        # Use a special lightweight query for years
        year_records = super().select(
            override_query='set/list/years_only',
            **years_context
        )
        # Extract years from records (rows are mapping-like, no .get())
        years = set()
        for record in year_records:
            year = record['year'] if 'year' in record.keys() else None
            if year:
                years.add(year)
        if years:
            self.years = list(years)
            self.years.sort(reverse=True)  # Most recent years first
        else:
            import logging
            logger = logging.getLogger(__name__)
            logger.warning("No years found in filtered dataset, falling back to current page")
            self._populate_years()
    except Exception as e:
        import logging
        logger = logging.getLogger(__name__)
        logger.error(f"Exception in _populate_years_from_filtered_dataset: {e}")
        self._populate_years()
def _populate_themes_from_filtered_dataset(self, query_name: str, **filter_context) -> None:
    """Populate themes list from filtered dataset (all pages, not just current page).

    Runs the lightweight 'set/list/themes_only' query (pagination keys
    stripped) and maps the distinct theme IDs to names. On failure it
    falls back to themes/years of ALL sets, and finally to the current
    page only.

    NOTE(review): query_name is currently unused — confirm whether it
    was meant to be forwarded to select().
    """
    try:
        from .theme_list import BrickThemeList
        # Use a simplified query to get just distinct theme_ids
        theme_context = dict(filter_context)
        theme_context.pop('limit', None)
        theme_context.pop('offset', None)
        # Use a special lightweight query for themes
        theme_records = super().select(
            override_query='set/list/themes_only',
            **theme_context
        )
        # Convert to theme names
        theme_list = BrickThemeList()
        themes = set()
        for record in theme_records:
            # BUGFIX: rows are sqlite3.Row-like and do not implement
            # .get(); record.get('theme_id') raised AttributeError and
            # silently forced the fallback path on every call. Use the
            # same key check as _populate_years_from_filtered_dataset.
            theme_id = record['theme_id'] if 'theme_id' in record.keys() else None
            if theme_id:
                theme = theme_list.get(theme_id)
                if theme and hasattr(theme, 'name'):
                    themes.add(theme.name)
        self.themes = list(themes)
        self.themes.sort()
    except Exception:
        # Fall back to simpler approach: get themes from ALL sets (ignoring filters)
        # This is better than showing only current page themes
        try:
            from .theme_list import BrickThemeList
            all_sets = BrickSetList()
            all_sets.list(do_theme=True)
            themes = set()
            years = set()
            for record in all_sets.records:
                if hasattr(record, 'theme') and hasattr(record.theme, 'name'):
                    themes.add(record.theme.name)
                if hasattr(record, 'fields') and hasattr(record.fields, 'year') and record.fields.year:
                    years.add(record.fields.year)
            self.themes = list(themes)
            self.themes.sort()
            self.years = list(years)
            self.years.sort(reverse=True)
        except Exception:
            # Final fallback to current page themes
            self._populate_themes()
            self._populate_years()
def _matches_search(self, record, search_query: str) -> bool:
"""Check if record matches search query"""
search_lower = search_query.lower()
return (search_lower in record.fields.name.lower() or
search_lower in record.fields.set.lower())
def _matches_theme(self, record, theme_id: str) -> bool:
"""Check if record matches theme filter"""
return str(record.fields.theme_id) == theme_id
def _matches_owner(self, record, owner_filter: str) -> bool:
"""Check if record matches owner filter"""
if not owner_filter.startswith('owner-'):
return True
# Convert owner-uuid format to owner_uuid column name
owner_column = owner_filter.replace('-', '_')
# Check if record has this owner attribute set to 1
return hasattr(record.fields, owner_column) and getattr(record.fields, owner_column) == 1
def _matches_purchase_location(self, record, location_filter: str) -> bool:
"""Check if record matches purchase location filter"""
return record.fields.purchase_location == location_filter
def _matches_storage(self, record, storage_filter: str) -> bool:
"""Check if record matches storage filter"""
return record.fields.storage == storage_filter
def _matches_tag(self, record, tag_filter: str) -> bool:
"""Check if record matches tag filter"""
if not tag_filter.startswith('tag-'):
return True
# Convert tag-uuid format to tag_uuid column name
tag_column = tag_filter.replace('-', '_')
# Check if record has this tag attribute set to 1
return hasattr(record.fields, tag_column) and getattr(record.fields, tag_column) == 1
def _sort_records(self, records, sort_field: str, sort_order: str):
"""Sort records manually"""
reverse = sort_order == 'desc'
if sort_field == 'set':
return sorted(records, key=lambda r: self._set_sort_key(r.fields.set), reverse=reverse)
elif sort_field == 'name':
return sorted(records, key=lambda r: r.fields.name, reverse=reverse)
elif sort_field == 'year':
return sorted(records, key=lambda r: r.fields.year, reverse=reverse)
elif sort_field == 'parts':
return sorted(records, key=lambda r: r.fields.number_of_parts, reverse=reverse)
# Add more sort fields as needed
return records
def _set_sort_key(self, set_number: str) -> tuple:
"""Generate sort key for set numbers like '10121-1' -> (10121, 1)"""
try:
if '-' in set_number:
main_part, version_part = set_number.split('-', 1)
return (int(main_part), int(version_part))
else:
return (int(set_number), 0)
except (ValueError, TypeError):
# Fallback to string sorting if parsing fails
return (float('inf'), set_number)
# Sets with a minifigure part damaged # Sets with a minifigure part damaged
def damaged_minifigure(self, figure: str, /) -> Self: def damaged_minifigure(self, figure: str, /) -> Self:
# Save the parameters to the fields # Save the parameters to the fields
@@ -93,6 +591,7 @@ class BrickSetList(BrickRecordList[BrickSet]):
**context: Any, **context: Any,
) -> None: ) -> None:
themes = set() themes = set()
years = set()
if order is None: if order is None:
order = self.order order = self.order
@@ -102,20 +601,22 @@ class BrickSetList(BrickRecordList[BrickSet]):
override_query=override_query, override_query=override_query,
order=order, order=order,
limit=limit, limit=limit,
owners=BrickSetOwnerList.as_columns(), **context
statuses=BrickSetStatusList.as_columns(),
tags=BrickSetTagList.as_columns(),
): ):
brickset = BrickSet(record=record) brickset = BrickSet(record=record)
self.records.append(brickset) self.records.append(brickset)
if do_theme: if do_theme:
themes.add(brickset.theme.name) themes.add(brickset.theme.name)
if hasattr(brickset, 'fields') and hasattr(brickset.fields, 'year') and brickset.fields.year:
years.add(brickset.fields.year)
# Convert the set into a list and sort it # Convert the set into a list and sort it
if do_theme: if do_theme:
self.themes = list(themes) self.themes = list(themes)
self.themes.sort() self.themes.sort()
self.years = list(years)
self.years.sort(reverse=True) # Most recent years first
# Sets missing a minifigure part # Sets missing a minifigure part
def missing_minifigure(self, figure: str, /) -> Self: def missing_minifigure(self, figure: str, /) -> Self:

View File

@@ -6,6 +6,8 @@ from flask_socketio import SocketIO
from .instructions import BrickInstructions from .instructions import BrickInstructions
from .instructions_list import BrickInstructionsList from .instructions_list import BrickInstructionsList
from .peeron_instructions import PeeronInstructions, PeeronPage
from .peeron_pdf import PeeronPDF
from .set import BrickSet from .set import BrickSet
from .socket_decorator import authenticated_socket, rebrickable_socket from .socket_decorator import authenticated_socket, rebrickable_socket
from .sql import close as sql_close from .sql import close as sql_close
@@ -18,8 +20,10 @@ MESSAGES: Final[dict[str, str]] = {
'CONNECT': 'connect', 'CONNECT': 'connect',
'DISCONNECT': 'disconnect', 'DISCONNECT': 'disconnect',
'DOWNLOAD_INSTRUCTIONS': 'download_instructions', 'DOWNLOAD_INSTRUCTIONS': 'download_instructions',
'DOWNLOAD_PEERON_PAGES': 'download_peeron_pages',
'FAIL': 'fail', 'FAIL': 'fail',
'IMPORT_SET': 'import_set', 'IMPORT_SET': 'import_set',
'LOAD_PEERON_PAGES': 'load_peeron_pages',
'LOAD_SET': 'load_set', 'LOAD_SET': 'load_set',
'PROGRESS': 'progress', 'PROGRESS': 'progress',
'SET_LOADED': 'set_loaded', 'SET_LOADED': 'set_loaded',
@@ -71,6 +75,9 @@ class BrickSocket(object):
**kwargs, **kwargs,
path=app.config['SOCKET_PATH'], path=app.config['SOCKET_PATH'],
async_mode='gevent', async_mode='gevent',
# Ping/pong settings for mobile network resilience
ping_timeout=30, # Wait 30s for pong response before disconnecting
ping_interval=25, # Send ping every 25s to keep connection alive
) )
# Store the socket in the app config # Store the socket in the app config
@@ -82,9 +89,23 @@ class BrickSocket(object):
self.connected() self.connected()
@self.socket.on(MESSAGES['DISCONNECT'], namespace=self.namespace) @self.socket.on(MESSAGES['DISCONNECT'], namespace=self.namespace)
def disconnect() -> None: def disconnect(reason=None) -> None:
self.disconnected() self.disconnected()
@self.socket.on('connect_error', namespace=self.namespace)
def connect_error(data) -> None:
logger.error(f'Socket CONNECT_ERROR: {data}')
@self.socket.on_error(namespace=self.namespace)
def error_handler(e) -> None:
logger.error(f'Socket ERROR: {e}')
try:
user_agent = request.headers.get('User-Agent', 'unknown')
remote_addr = request.remote_addr
logger.error(f'Socket ERROR details: ip={remote_addr}, ua={user_agent[:80]}...')
except Exception:
pass
@self.socket.on(MESSAGES['DOWNLOAD_INSTRUCTIONS'], namespace=self.namespace) # noqa: E501 @self.socket.on(MESSAGES['DOWNLOAD_INSTRUCTIONS'], namespace=self.namespace) # noqa: E501
@authenticated_socket(self) @authenticated_socket(self)
def download_instructions(data: dict[str, Any], /) -> None: def download_instructions(data: dict[str, Any], /) -> None:
@@ -106,6 +127,84 @@ class BrickSocket(object):
BrickInstructionsList(force=True) BrickInstructionsList(force=True)
@self.socket.on(MESSAGES['LOAD_PEERON_PAGES'], namespace=self.namespace)  # noqa: E501
def load_peeron_pages(data: dict[str, Any], /) -> None:
    """Socket handler: scrape Peeron for a set's instruction pages.

    Expects data['set'] to hold the set number. Emits progress while
    thumbnails are cached, then a completion (or failure) message.

    NOTE(review): unlike DOWNLOAD_PEERON_PAGES below, this handler is
    not wrapped in @authenticated_socket — confirm that is intentional.
    """
    logger.debug('Socket: LOAD_PEERON_PAGES={data} (from: {fr})'.format(
        data=data, fr=request.remote_addr))
    try:
        set_number = data.get('set', '')
        if not set_number:
            self.fail(message="Set number is required")
            return
        # Create Peeron instructions instance with socket for progress reporting
        peeron = PeeronInstructions(set_number, socket=self)
        # Find pages (this will report progress for thumbnail caching)
        pages = peeron.find_pages()
        # Complete the operation (JavaScript will handle redirect)
        self.complete(message=f"Found {len(pages)} instruction pages on Peeron")
    except Exception as e:
        logger.error(f"Error in load_peeron_pages: {e}")
        self.fail(message=f"Error loading Peeron pages: {e}")
@self.socket.on(MESSAGES['DOWNLOAD_PEERON_PAGES'], namespace=self.namespace)  # noqa: E501
@authenticated_socket(self)
def download_peeron_pages(data: dict[str, Any], /) -> None:
    """Socket handler: build a PDF from selected Peeron pages.

    Expects data['set'] (e.g. '10255-1') and data['pages'] (a list of
    page dicts). Generates the PDF, then refreshes the instructions
    list so the new file shows up. Failures are reported via fail().
    """
    logger.debug('Socket: DOWNLOAD_PEERON_PAGES={data} (from: {fr})'.format(
        data=data,
        fr=request.sid,  # type: ignore
    ))
    try:
        # Extract data from the request
        set_number = data.get('set', '')
        pages_data = data.get('pages', [])
        if not set_number:
            raise ValueError("Set number is required")
        if not pages_data:
            raise ValueError("No pages selected")
        # Parse set number into number and version ('10255-1' -> '10255', '1');
        # a missing version defaults to '1'
        if '-' in set_number:
            parts = set_number.split('-', 1)
            set_num = parts[0]
            version_num = parts[1] if len(parts) > 1 else '1'
        else:
            set_num = set_number
            version_num = '1'
        # Convert page data to PeeronPage objects
        pages = []
        for page_data in pages_data:
            page = PeeronPage(
                page_number=page_data.get('page_number', ''),
                original_image_url=page_data.get('original_image_url', ''),
                cached_full_image_path=page_data.get('cached_full_image_path', ''),
                cached_thumbnail_url='',  # Not needed for PDF generation
                alt_text=page_data.get('alt_text', ''),
                rotation=page_data.get('rotation', 0)
            )
            pages.append(page)
        # Create PDF generator and start download
        pdf_generator = PeeronPDF(set_num, version_num, pages, socket=self)
        pdf_generator.create_pdf()
        # Note: Cache cleanup is handled automatically by pdf_generator.create_pdf()
        # Refresh instructions list to include new PDF
        BrickInstructionsList(force=True)
    except Exception as e:
        logger.error(f"Error in download_peeron_pages: {e}")
        self.fail(message=f"Error downloading Peeron pages: {e}")
@self.socket.on(MESSAGES['IMPORT_SET'], namespace=self.namespace) @self.socket.on(MESSAGES['IMPORT_SET'], namespace=self.namespace)
@rebrickable_socket(self) @rebrickable_socket(self)
def import_set(data: dict[str, Any], /) -> None: def import_set(data: dict[str, Any], /) -> None:
@@ -150,13 +249,32 @@ class BrickSocket(object):
# Socket is connected # Socket is connected
def connected(self, /) -> Tuple[str, int]: def connected(self, /) -> Tuple[str, int]:
logger.debug('Socket: client connected') # Get detailed connection info for debugging
try:
sid = request.sid # type: ignore
transport = request.environ.get('HTTP_UPGRADE', 'polling')
user_agent = request.headers.get('User-Agent', 'unknown')
remote_addr = request.remote_addr
# Check if it's likely a mobile device
is_mobile = any(x in user_agent.lower() for x in ['iphone', 'ipad', 'android', 'mobile'])
logger.info(
f'Socket CONNECTED: sid={sid}, transport={transport}, '
f'ip={remote_addr}, mobile={is_mobile}, ua={user_agent[:80]}...'
)
except Exception as e:
logger.warning(f'Socket connected but failed to get details: {e}')
return '', 301 return '', 301
# Socket is disconnected # Socket is disconnected
def disconnected(self, /) -> None: def disconnected(self, /) -> None:
logger.debug('Socket: client disconnected') try:
sid = request.sid # type: ignore
logger.info(f'Socket DISCONNECTED: sid={sid}')
except Exception as e:
logger.info(f'Socket disconnected (sid unavailable): {e}')
# Emit a message through the socket # Emit a message through the socket
def emit(self, name: str, *arg, all=False) -> None: def emit(self, name: str, *arg, all=False) -> None:

View File

@@ -60,6 +60,29 @@ class BrickSQL(object):
# Grab a cursor # Grab a cursor
self.cursor = self.connection.cursor() self.cursor = self.connection.cursor()
# SQLite Performance Optimizations
logger.debug('SQLite3: applying performance optimizations')
# Enable WAL (Write-Ahead Logging) mode for better concurrency
# Allows multiple readers while writer is active
self.connection.execute('PRAGMA journal_mode=WAL')
# Increase cache size for better query performance
# Default is 2000 pages, increase to 10000 pages (~40MB for 4KB pages)
self.connection.execute('PRAGMA cache_size=10000')
# Store temporary tables and indices in memory for speed
self.connection.execute('PRAGMA temp_store=memory')
# Enable foreign key constraints (good practice)
self.connection.execute('PRAGMA foreign_keys=ON')
# Optimize for read performance (trade write speed for read speed)
self.connection.execute('PRAGMA synchronous=NORMAL')
# Analyze database statistics for better query planning
self.connection.execute('ANALYZE')
# Grab the version and check # Grab the version and check
try: try:
version = self.fetchone('schema/get_version') version = self.fetchone('schema/get_version')

View File

@@ -0,0 +1,9 @@
-- description: Add checked field to bricktracker_parts table for part walkthrough tracking
BEGIN TRANSACTION;
-- Add checked field to the bricktracker_parts table
-- This allows users to track which parts they have checked during walkthroughs
ALTER TABLE "bricktracker_parts" ADD COLUMN "checked" BOOLEAN DEFAULT 0;
COMMIT;

View File

@@ -0,0 +1,56 @@
-- description: Performance optimization indexes
-- High-impact composite index for problem parts aggregation
-- Used in set listings, statistics, and problem reports
CREATE INDEX IF NOT EXISTS idx_bricktracker_parts_id_missing_damaged
ON bricktracker_parts(id, missing, damaged);
-- Composite index for parts lookup by part and color
-- Used in part listings and filtering operations
CREATE INDEX IF NOT EXISTS idx_bricktracker_parts_part_color_spare
ON bricktracker_parts(part, color, spare);
-- Composite index for set storage filtering
-- Used in set listings filtered by storage location
CREATE INDEX IF NOT EXISTS idx_bricktracker_sets_set_storage
ON bricktracker_sets("set", storage);
-- Search optimization index for set names
-- Improves text search performance on set listings
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_name_lower
ON rebrickable_sets(LOWER(name));
-- Search optimization index for part names
-- Improves text search performance on part listings
CREATE INDEX IF NOT EXISTS idx_rebrickable_parts_name_lower
ON rebrickable_parts(LOWER(name));
-- Additional indexes for common join patterns
-- Set purchase filtering
CREATE INDEX IF NOT EXISTS idx_bricktracker_sets_purchase_location
ON bricktracker_sets(purchase_location);
-- Parts quantity filtering
CREATE INDEX IF NOT EXISTS idx_bricktracker_parts_quantity
ON bricktracker_parts(quantity);
-- Year-based filtering optimization
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_year
ON rebrickable_sets(year);
-- Theme-based filtering optimization
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_theme_id
ON rebrickable_sets(theme_id);
-- Rebrickable sets number and version for sorting
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_number_version
ON rebrickable_sets(number, version);
-- Purchase date filtering and sorting
CREATE INDEX IF NOT EXISTS idx_bricktracker_sets_purchase_date
ON bricktracker_sets(purchase_date);
-- Minifigures aggregation optimization
CREATE INDEX IF NOT EXISTS idx_bricktracker_minifigures_id_quantity
ON bricktracker_minifigures(id, quantity);

View File

@@ -0,0 +1,58 @@
-- description: Change set number column from INTEGER to TEXT to support alphanumeric set numbers
-- Temporarily disable foreign key constraints for this migration.
-- This is necessary because we're recreating a table that other tables
-- reference. PRAGMA foreign_keys is a no-op inside a transaction, so it
-- must stay outside BEGIN/COMMIT (as it is here).
PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
-- Create new table with TEXT number column
CREATE TABLE "rebrickable_sets_new" (
"set" TEXT NOT NULL,
"number" TEXT NOT NULL,
"version" INTEGER NOT NULL,
"name" TEXT NOT NULL,
"year" INTEGER NOT NULL,
"theme_id" INTEGER NOT NULL,
"number_of_parts" INTEGER NOT NULL,
"image" TEXT,
"url" TEXT,
"last_modified" TEXT,
PRIMARY KEY("set")
);
-- Copy all data from old table to new table
-- Cast INTEGER number to TEXT explicitly
INSERT INTO "rebrickable_sets_new"
SELECT
"set",
CAST("number" AS TEXT),
"version",
"name",
"year",
"theme_id",
"number_of_parts",
"image",
"url",
"last_modified"
FROM "rebrickable_sets";
-- Drop old table
DROP TABLE "rebrickable_sets";
-- Rename new table to original name
ALTER TABLE "rebrickable_sets_new" RENAME TO "rebrickable_sets";
-- Recreate EVERY index that lived on the old table: DROP TABLE also
-- dropped them, and recreating only the (number, version) index would
-- silently lose the others added by earlier migrations.
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_number_version
ON rebrickable_sets(number, version);
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_year
ON rebrickable_sets(year);
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_theme_id
ON rebrickable_sets(theme_id);
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_name_lower
ON rebrickable_sets(LOWER(name));
-- Report any broken references before committing.
-- NOTE: foreign_key_check only RETURNS violation rows; it does not abort
-- the transaction by itself. The migration runner must inspect its output
-- and roll back if any rows come back.
PRAGMA foreign_key_check;
COMMIT;
-- Re-enable foreign key constraints
PRAGMA foreign_keys=ON;

View File

@@ -28,6 +28,8 @@ ON "bricktracker_minifigures"."figure" IS NOT DISTINCT FROM "rebrickable_minifig
{% block group %}{% endblock %} {% block group %}{% endblock %}
{% block having %}{% endblock %}
{% if order %} {% if order %}
ORDER BY {{ order }} ORDER BY {{ order }}
{% endif %} {% endif %}
@@ -35,3 +37,7 @@ ORDER BY {{ order }}
{% if limit %} {% if limit %}
LIMIT {{ limit }} LIMIT {{ limit }}
{% endif %} {% endif %}
{% if offset %}
OFFSET {{ offset }}
{% endif %}

View File

@@ -17,6 +17,14 @@ IFNULL(COUNT("bricktracker_minifigures"."id"), 0) AS "total_sets"
{% endblock %} {% endblock %}
{% block join %} {% block join %}
{% if theme_id or year %}
-- Join with sets for theme/year filtering
INNER JOIN "bricktracker_sets" AS "filter_sets"
ON "bricktracker_minifigures"."id" IS NOT DISTINCT FROM "filter_sets"."id"
INNER JOIN "rebrickable_sets" AS "filter_rs"
ON "filter_sets"."set" IS NOT DISTINCT FROM "filter_rs"."set"
{% endif %}
-- LEFT JOIN + SELECT to avoid messing the total -- LEFT JOIN + SELECT to avoid messing the total
LEFT JOIN ( LEFT JOIN (
SELECT SELECT
@@ -34,6 +42,32 @@ ON "bricktracker_minifigures"."id" IS NOT DISTINCT FROM "problem_join"."id"
AND "rebrickable_minifigures"."figure" IS NOT DISTINCT FROM "problem_join"."figure" AND "rebrickable_minifigures"."figure" IS NOT DISTINCT FROM "problem_join"."figure"
{% endblock %} {% endblock %}
{% block where %}
WHERE 1=1
{% if theme_id and theme_id != 'all' %}
AND "filter_rs"."theme_id" = {{ theme_id }}
{% endif %}
{% if year and year != 'all' %}
AND "filter_rs"."year" = {{ year }}
{% endif %}
{% if search_query %}
AND (LOWER("rebrickable_minifigures"."name") LIKE LOWER('%{{ search_query }}%'))
{% endif %}
{% endblock %}
{% block having %}
{% if problems_filter %}
HAVING 1=1
{% if problems_filter == 'missing' %}
AND SUM(IFNULL("problem_join"."total_missing", 0)) > 0
{% elif problems_filter == 'damaged' %}
AND SUM(IFNULL("problem_join"."total_damaged", 0)) > 0
{% elif problems_filter == 'both' %}
AND SUM(IFNULL("problem_join"."total_missing", 0)) > 0 AND SUM(IFNULL("problem_join"."total_damaged", 0)) > 0
{% endif %}
{% endif %}
{% endblock %}
{% block group %} {% block group %}
GROUP BY GROUP BY
"rebrickable_minifigures"."figure" "rebrickable_minifigures"."figure"

View File

@@ -29,6 +29,10 @@ COUNT("bricktracker_minifigures"."id") AS "total_sets"
INNER JOIN "bricktracker_sets" INNER JOIN "bricktracker_sets"
ON "bricktracker_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id" ON "bricktracker_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
-- Join with rebrickable sets for theme/year filtering
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
-- Left join with set owners (using dynamic columns) -- Left join with set owners (using dynamic columns)
LEFT JOIN "bricktracker_set_owners" LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id" ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
@@ -60,12 +64,38 @@ AND "rebrickable_minifigures"."figure" IS NOT DISTINCT FROM "problem_join"."figu
{% endblock %} {% endblock %}
{% block where %} {% block where %}
{% set conditions = [] %}
{% if owner_id and owner_id != 'all' %} {% if owner_id and owner_id != 'all' %}
WHERE "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 {% set _ = conditions.append('"bricktracker_set_owners"."owner_' ~ owner_id ~ '" = 1') %}
{% endif %}
{% if theme_id and theme_id != 'all' %}
{% set _ = conditions.append('"rebrickable_sets"."theme_id" = ' ~ theme_id) %}
{% endif %}
{% if year and year != 'all' %}
{% set _ = conditions.append('"rebrickable_sets"."year" = ' ~ year) %}
{% endif %}
{% if search_query %}
{% set _ = conditions.append('(LOWER("rebrickable_minifigures"."name") LIKE LOWER(\'%' ~ search_query ~ '%\'))') %}
{% endif %}
{% if conditions %}
WHERE {{ conditions | join(' AND ') }}
{% endif %} {% endif %}
{% endblock %} {% endblock %}
{% block group %} {% block group %}
GROUP BY GROUP BY
"rebrickable_minifigures"."figure" "rebrickable_minifigures"."figure"
{% endblock %}
{% block having %}
{% if problems_filter %}
HAVING 1=1
{% if problems_filter == 'missing' %}
AND SUM(IFNULL("problem_join"."total_missing", 0)) > 0
{% elif problems_filter == 'damaged' %}
AND SUM(IFNULL("problem_join"."total_damaged", 0)) > 0
{% elif problems_filter == 'both' %}
AND SUM(IFNULL("problem_join"."total_missing", 0)) > 0 AND SUM(IFNULL("problem_join"."total_damaged", 0)) > 0
{% endif %}
{% endif %}
{% endblock %} {% endblock %}

View File

@@ -0,0 +1,16 @@
-- Get distinct themes from minifigures' sets
-- Theme filter options for the minifigure list: each theme that at least
-- one tracked minifigure's parent set belongs to, with the number of
-- distinct figures under it (used for the dropdown counts).
SELECT DISTINCT
"rebrickable_sets"."theme_id",
COUNT(DISTINCT "bricktracker_minifigures"."figure") as "minifigure_count"
FROM "bricktracker_minifigures"
INNER JOIN "bricktracker_sets"
ON "bricktracker_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% if owner_id and owner_id != 'all' %}
-- owner_id is spliced into a dynamic column name (owner_<id>), so it
-- cannot be a bound parameter. NOTE(review): confirm owner_id is
-- validated against known owner columns before rendering.
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
WHERE "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
-- DISTINCT is redundant with GROUP BY here; kept as-is (doc-only change).
GROUP BY "rebrickable_sets"."theme_id"
ORDER BY "rebrickable_sets"."theme_id" ASC

View File

@@ -0,0 +1,16 @@
-- Get distinct years from minifigures' sets
SELECT DISTINCT
"rebrickable_sets"."year",
COUNT(DISTINCT "bricktracker_minifigures"."figure") as "minifigure_count"
FROM "bricktracker_minifigures"
INNER JOIN "bricktracker_sets"
ON "bricktracker_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% if owner_id and owner_id != 'all' %}
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
WHERE "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
GROUP BY "rebrickable_sets"."year"
ORDER BY "rebrickable_sets"."year" DESC

View File

@@ -9,6 +9,7 @@ SELECT
--"bricktracker_parts"."rebrickable_inventory", --"bricktracker_parts"."rebrickable_inventory",
"bricktracker_parts"."missing", "bricktracker_parts"."missing",
"bricktracker_parts"."damaged", "bricktracker_parts"."damaged",
"bricktracker_parts"."checked",
--"rebrickable_parts"."part", --"rebrickable_parts"."part",
--"rebrickable_parts"."color_id", --"rebrickable_parts"."color_id",
"rebrickable_parts"."color_name", "rebrickable_parts"."color_name",
@@ -60,3 +61,7 @@ ORDER BY {{ order }}
{% if limit %} {% if limit %}
LIMIT {{ limit }} LIMIT {{ limit }}
{% endif %} {% endif %}
{% if offset %}
OFFSET {{ offset }}
{% endif %}

View File

@@ -0,0 +1,19 @@
-- Color filter options for the problem-parts view: the distinct colors
-- that appear on at least one missing or damaged part, optionally
-- narrowed to the sets of a single owner.
SELECT DISTINCT
"rebrickable_parts"."color_id" AS "color_id",
"rebrickable_parts"."color_name" AS "color_name",
"rebrickable_parts"."color_rgb" AS "color_rgb"
FROM "rebrickable_parts"
INNER JOIN "bricktracker_parts"
ON "bricktracker_parts"."part" IS NOT DISTINCT FROM "rebrickable_parts"."part"
AND "bricktracker_parts"."color" IS NOT DISTINCT FROM "rebrickable_parts"."color_id"
{% if owner_id and owner_id != 'all' %}
-- Owner filtering needs each part's set row and its owner flag columns.
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
{% endif %}
WHERE ("bricktracker_parts"."missing" > 0 OR "bricktracker_parts"."damaged" > 0)
{% if owner_id and owner_id != 'all' %}
-- owner_id is a dynamic column suffix, not a bindable value.
-- NOTE(review): confirm owner_id is validated upstream.
AND "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
ORDER BY "rebrickable_parts"."color_name" ASC

View File

@@ -24,11 +24,35 @@ SUM(IFNULL("bricktracker_minifigures"."quantity", 0)) AS "total_minifigures"
LEFT JOIN "bricktracker_minifigures" LEFT JOIN "bricktracker_minifigures"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_minifigures"."id" ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_minifigures"."id"
AND "bricktracker_parts"."figure" IS NOT DISTINCT FROM "bricktracker_minifigures"."figure" AND "bricktracker_parts"."figure" IS NOT DISTINCT FROM "bricktracker_minifigures"."figure"
{% if theme_id or year %}
INNER JOIN "bricktracker_sets" AS "filter_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "filter_sets"."id"
INNER JOIN "rebrickable_sets" AS "filter_rs"
ON "filter_sets"."set" IS NOT DISTINCT FROM "filter_rs"."set"
{% endif %}
{% endblock %} {% endblock %}
{% block where %} {% block where %}
{% set conditions = [] %}
{% if color_id and color_id != 'all' %} {% if color_id and color_id != 'all' %}
WHERE "bricktracker_parts"."color" = {{ color_id }} {% set _ = conditions.append('"bricktracker_parts"."color" = ' ~ color_id) %}
{% endif %}
{% if theme_id and theme_id != 'all' %}
{% set _ = conditions.append('"filter_rs"."theme_id" = ' ~ theme_id) %}
{% endif %}
{% if year and year != 'all' %}
{% set _ = conditions.append('"filter_rs"."year" = ' ~ year) %}
{% endif %}
{% if search_query %}
{% set search_condition = '(LOWER("rebrickable_parts"."name") LIKE LOWER(\'%' ~ search_query ~ '%\') OR LOWER("rebrickable_parts"."color_name") LIKE LOWER(\'%' ~ search_query ~ '%\') OR LOWER("bricktracker_parts"."part") LIKE LOWER(\'%' ~ search_query ~ '%\'))' %}
{% set _ = conditions.append(search_condition) %}
{% endif %}
{% if skip_spare_parts %}
{% set _ = conditions.append('"bricktracker_parts"."spare" = 0') %}
{% endif %}
{% if conditions %}
WHERE {{ conditions | join(' AND ') }}
{% endif %} {% endif %}
{% endblock %} {% endblock %}

View File

@@ -56,17 +56,22 @@ AND "bricktracker_parts"."figure" IS NOT DISTINCT FROM "bricktracker_minifigures
{% endblock %} {% endblock %}
{% block where %} {% block where %}
{% set has_where = false %} {% set conditions = [] %}
{% if owner_id and owner_id != 'all' %} {% if owner_id and owner_id != 'all' %}
WHERE "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 {% set _ = conditions.append('"bricktracker_set_owners"."owner_' ~ owner_id ~ '" = 1') %}
{% set has_where = true %}
{% endif %} {% endif %}
{% if color_id and color_id != 'all' %} {% if color_id and color_id != 'all' %}
{% if has_where %} {% set _ = conditions.append('"bricktracker_parts"."color" = ' ~ color_id) %}
AND "bricktracker_parts"."color" = {{ color_id }}
{% else %}
WHERE "bricktracker_parts"."color" = {{ color_id }}
{% endif %} {% endif %}
{% if search_query %}
{% set search_condition = '(LOWER("rebrickable_parts"."name") LIKE LOWER(\'%' ~ search_query ~ '%\') OR LOWER("rebrickable_parts"."color_name") LIKE LOWER(\'%' ~ search_query ~ '%\') OR LOWER("bricktracker_parts"."part") LIKE LOWER(\'%' ~ search_query ~ '%\'))' %}
{% set _ = conditions.append(search_condition) %}
{% endif %}
{% if skip_spare_parts %}
{% set _ = conditions.append('"bricktracker_parts"."spare" = 0') %}
{% endif %}
{% if conditions %}
WHERE {{ conditions | join(' AND ') }}
{% endif %} {% endif %}
{% endblock %} {% endblock %}

View File

@@ -10,7 +10,12 @@ SUM("bricktracker_parts"."damaged") AS "total_damaged",
{% endblock %} {% endblock %}
{% block where %} {% block where %}
WHERE "bricktracker_parts"."figure" IS NOT DISTINCT FROM :figure {% set conditions = [] %}
{% set _ = conditions.append('"bricktracker_parts"."figure" IS NOT DISTINCT FROM :figure') %}
{% if skip_spare_parts %}
{% set _ = conditions.append('"bricktracker_parts"."spare" = 0') %}
{% endif %}
WHERE {{ conditions | join(' AND ') }}
{% endblock %} {% endblock %}
{% block group %} {% block group %}

View File

@@ -1,30 +1,100 @@
{% extends 'part/base/base.sql' %} {% extends 'part/base/base.sql' %}
{% block total_missing %} {% block total_missing %}
{% if owner_id and owner_id != 'all' %}
SUM(CASE WHEN "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 THEN "bricktracker_parts"."missing" ELSE 0 END) AS "total_missing",
{% else %}
SUM("bricktracker_parts"."missing") AS "total_missing", SUM("bricktracker_parts"."missing") AS "total_missing",
{% endif %}
{% endblock %} {% endblock %}
{% block total_damaged %} {% block total_damaged %}
{% if owner_id and owner_id != 'all' %}
SUM(CASE WHEN "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 THEN "bricktracker_parts"."damaged" ELSE 0 END) AS "total_damaged",
{% else %}
SUM("bricktracker_parts"."damaged") AS "total_damaged", SUM("bricktracker_parts"."damaged") AS "total_damaged",
{% endif %}
{% endblock %}
{% block total_quantity %}
{% if owner_id and owner_id != 'all' %}
SUM(CASE WHEN "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 THEN "bricktracker_parts"."quantity" * IFNULL("bricktracker_minifigures"."quantity", 1) ELSE 0 END) AS "total_quantity",
{% else %}
SUM("bricktracker_parts"."quantity" * IFNULL("bricktracker_minifigures"."quantity", 1)) AS "total_quantity",
{% endif %}
{% endblock %} {% endblock %}
{% block total_sets %} {% block total_sets %}
IFNULL(COUNT("bricktracker_parts"."id"), 0) - IFNULL(COUNT("bricktracker_parts"."figure"), 0) AS "total_sets", {% if owner_id and owner_id != 'all' %}
COUNT(DISTINCT CASE WHEN "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 THEN "bricktracker_parts"."id" ELSE NULL END) AS "total_sets",
{% else %}
COUNT(DISTINCT "bricktracker_parts"."id") AS "total_sets",
{% endif %}
{% endblock %} {% endblock %}
{% block total_minifigures %} {% block total_minifigures %}
{% if owner_id and owner_id != 'all' %}
SUM(CASE WHEN "bricktracker_set_owners"."owner_{{ owner_id }}" = 1 THEN IFNULL("bricktracker_minifigures"."quantity", 0) ELSE 0 END) AS "total_minifigures"
{% else %}
SUM(IFNULL("bricktracker_minifigures"."quantity", 0)) AS "total_minifigures" SUM(IFNULL("bricktracker_minifigures"."quantity", 0)) AS "total_minifigures"
{% endif %}
{% endblock %} {% endblock %}
{% block join %} {% block join %}
-- Join with sets to get owner information
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
-- Join with rebrickable sets for theme/year filtering
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
-- Left join with set owners (using dynamic columns)
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
-- Left join with set tags (for tag filtering)
{% if tag_id and tag_id != 'all' %}
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_tags"."id"
{% endif %}
-- Left join with minifigures
LEFT JOIN "bricktracker_minifigures" LEFT JOIN "bricktracker_minifigures"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_minifigures"."id" ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_minifigures"."id"
AND "bricktracker_parts"."figure" IS NOT DISTINCT FROM "bricktracker_minifigures"."figure" AND "bricktracker_parts"."figure" IS NOT DISTINCT FROM "bricktracker_minifigures"."figure"
{% endblock %} {% endblock %}
{% block where %} {% block where %}
WHERE "bricktracker_parts"."missing" > 0 {% set conditions = [] %}
OR "bricktracker_parts"."damaged" > 0 -- Always filter for problematic parts
{% set _ = conditions.append('("bricktracker_parts"."missing" > 0 OR "bricktracker_parts"."damaged" > 0)') %}
{% if owner_id and owner_id != 'all' %}
{% set _ = conditions.append('"bricktracker_set_owners"."owner_' ~ owner_id ~ '" = 1') %}
{% endif %}
{% if color_id and color_id != 'all' %}
{% set _ = conditions.append('"bricktracker_parts"."color" = ' ~ color_id) %}
{% endif %}
{% if theme_id and theme_id != 'all' %}
{% set _ = conditions.append('"rebrickable_sets"."theme_id" = ' ~ theme_id) %}
{% endif %}
{% if year and year != 'all' %}
{% set _ = conditions.append('"rebrickable_sets"."year" = ' ~ year) %}
{% endif %}
{% if storage_id and storage_id != 'all' %}
{% set _ = conditions.append('"bricktracker_sets"."storage" = \'' ~ storage_id ~ '\'') %}
{% endif %}
{% if tag_id and tag_id != 'all' %}
{% set _ = conditions.append('"bricktracker_set_tags"."tag_' ~ tag_id ~ '" = 1') %}
{% endif %}
{% if search_query %}
{% set search_condition = '(LOWER("rebrickable_parts"."name") LIKE LOWER(\'%' ~ search_query ~ '%\') OR LOWER("rebrickable_parts"."color_name") LIKE LOWER(\'%' ~ search_query ~ '%\') OR LOWER("bricktracker_parts"."part") LIKE LOWER(\'%' ~ search_query ~ '%\'))' %}
{% set _ = conditions.append(search_condition) %}
{% endif %}
{% if skip_spare_parts %}
{% set _ = conditions.append('"bricktracker_parts"."spare" = 0') %}
{% endif %}
WHERE {{ conditions | join(' AND ') }}
{% endblock %} {% endblock %}
{% block group %} {% block group %}

View File

@@ -10,6 +10,11 @@ IFNULL("bricktracker_parts"."damaged", 0) AS "total_damaged",
{% endblock %} {% endblock %}
{% block where %} {% block where %}
WHERE "bricktracker_parts"."id" IS NOT DISTINCT FROM :id {% set conditions = [] %}
AND "bricktracker_parts"."figure" IS NOT DISTINCT FROM :figure {% set _ = conditions.append('"bricktracker_parts"."id" IS NOT DISTINCT FROM :id') %}
{% set _ = conditions.append('"bricktracker_parts"."figure" IS NOT DISTINCT FROM :figure') %}
{% if skip_spare_parts %}
{% set _ = conditions.append('"bricktracker_parts"."spare" = 0') %}
{% endif %}
WHERE {{ conditions | join(' AND ') }}
{% endblock %} {% endblock %}

View File

@@ -0,0 +1,21 @@
-- Get distinct storages from problem parts' sets
-- Storage filter options for the problem-parts view: every storage that
-- holds at least one set with missing/damaged parts, plus how many
-- distinct problem parts are stored there.
SELECT DISTINCT
"bricktracker_sets"."storage" AS "storage_id",
"bricktracker_metadata_storages"."name" AS "storage_name",
COUNT(DISTINCT "bricktracker_parts"."part") as "part_count"
FROM "bricktracker_parts"
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
-- LEFT JOIN: a storage id with no metadata row still appears, with a
-- NULL storage_name.
LEFT JOIN "bricktracker_metadata_storages"
ON "bricktracker_sets"."storage" IS NOT DISTINCT FROM "bricktracker_metadata_storages"."id"
{% if owner_id and owner_id != 'all' %}
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
{% endif %}
WHERE ("bricktracker_parts"."missing" > 0 OR "bricktracker_parts"."damaged" > 0)
AND "bricktracker_sets"."storage" IS NOT NULL
{% if owner_id and owner_id != 'all' %}
-- owner_id is a dynamic column suffix, not a bindable value.
-- NOTE(review): confirm owner_id is validated upstream.
AND "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
GROUP BY "bricktracker_sets"."storage", "bricktracker_metadata_storages"."name"
ORDER BY "bricktracker_metadata_storages"."name" ASC

View File

@@ -0,0 +1,7 @@
-- All tags, alphabetically. Per-tag filtering happens in application
-- code because bricktracker_set_tags stores tags as dynamic per-tag
-- columns, which makes direct SQL filtering impractical here.
SELECT
"t"."id" AS "tag_id",
"t"."name" AS "tag_name"
FROM "bricktracker_metadata_tags" AS "t"
ORDER BY "t"."name" ASC

View File

@@ -0,0 +1,16 @@
-- Get distinct themes from parts' sets
-- Theme filter options for the parts list: each theme with at least one
-- tracked part, plus the count of distinct part numbers under it.
SELECT DISTINCT
"rebrickable_sets"."theme_id",
COUNT(DISTINCT "bricktracker_parts"."part") as "part_count"
FROM "bricktracker_parts"
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% if owner_id and owner_id != 'all' %}
-- owner_id is a dynamic column suffix, not a bindable value.
-- NOTE(review): confirm owner_id is validated upstream.
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
WHERE "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
GROUP BY "rebrickable_sets"."theme_id"
ORDER BY "rebrickable_sets"."theme_id" ASC

View File

@@ -0,0 +1,19 @@
-- Get distinct themes from problem parts' sets
-- Theme filter options for the problem-parts view: themes whose sets
-- contain at least one missing or damaged part, with the count of
-- distinct problem part numbers per theme.
SELECT DISTINCT
"rebrickable_sets"."theme_id",
COUNT(DISTINCT "bricktracker_parts"."part") as "part_count"
FROM "bricktracker_parts"
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% if owner_id and owner_id != 'all' %}
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
{% endif %}
WHERE ("bricktracker_parts"."missing" > 0 OR "bricktracker_parts"."damaged" > 0)
{% if owner_id and owner_id != 'all' %}
-- owner_id is a dynamic column suffix, not a bindable value.
-- NOTE(review): confirm owner_id is validated upstream.
AND "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
GROUP BY "rebrickable_sets"."theme_id"
ORDER BY "rebrickable_sets"."theme_id" ASC

View File

@@ -0,0 +1,7 @@
-- Set the "checked" flag on exactly one part row, identified by the full
-- composite key (set id, figure, part, color, spare). IS NOT DISTINCT
-- FROM is used instead of "=" so NULL key components (e.g. figure for a
-- set-level part) still match.
UPDATE "bricktracker_parts"
SET "checked" = :checked
WHERE "id" IS NOT DISTINCT FROM :id
AND "figure" IS NOT DISTINCT FROM :figure
AND "part" IS NOT DISTINCT FROM :part
AND "color" IS NOT DISTINCT FROM :color
AND "spare" IS NOT DISTINCT FROM :spare

View File

@@ -0,0 +1,16 @@
-- Get distinct years from parts' sets
-- Year filter options for the parts list: each release year with at
-- least one tracked part, plus the count of distinct part numbers.
-- Newest years first.
SELECT DISTINCT
"rebrickable_sets"."year",
COUNT(DISTINCT "bricktracker_parts"."part") as "part_count"
FROM "bricktracker_parts"
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% if owner_id and owner_id != 'all' %}
-- owner_id is a dynamic column suffix, not a bindable value.
-- NOTE(review): confirm owner_id is validated upstream.
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
WHERE "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
GROUP BY "rebrickable_sets"."year"
ORDER BY "rebrickable_sets"."year" DESC

View File

@@ -0,0 +1,19 @@
-- Get distinct years from problem parts' sets
-- Year filter options for the problem-parts view: release years whose
-- sets contain at least one missing or damaged part, with the count of
-- distinct problem part numbers per year. Newest years first.
SELECT DISTINCT
"rebrickable_sets"."year",
COUNT(DISTINCT "bricktracker_parts"."part") as "part_count"
FROM "bricktracker_parts"
INNER JOIN "bricktracker_sets"
ON "bricktracker_parts"."id" IS NOT DISTINCT FROM "bricktracker_sets"."id"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% if owner_id and owner_id != 'all' %}
INNER JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
{% endif %}
WHERE ("bricktracker_parts"."missing" > 0 OR "bricktracker_parts"."damaged" > 0)
{% if owner_id and owner_id != 'all' %}
-- owner_id is a dynamic column suffix, not a bindable value.
-- NOTE(review): confirm owner_id is validated upstream.
AND "bricktracker_set_owners"."owner_{{ owner_id }}" = 1
{% endif %}
GROUP BY "rebrickable_sets"."year"
ORDER BY "rebrickable_sets"."year" DESC

View File

@@ -49,3 +49,7 @@ ORDER BY {{ order }}
{% if limit %} {% if limit %}
LIMIT {{ limit }} LIMIT {{ limit }}
{% endif %} {% endif %}
{% if offset %}
OFFSET {{ offset }}
{% endif %}

View File

@@ -0,0 +1,4 @@
-- Count tracked set instances that belong to one theme.
-- theme_id is rendered into the SQL text by Jinja (not a bound
-- parameter); the "| int" filter forces it to an integer literal so a
-- malicious or malformed value cannot inject SQL. A non-numeric value
-- renders as 0, which matches no theme.
SELECT COUNT(*) as count
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets" ON "bricktracker_sets"."set" = "rebrickable_sets"."set"
WHERE "rebrickable_sets"."theme_id" = {{ theme_id | int }}

View File

@@ -3,22 +3,24 @@
BEGIN TRANSACTION; BEGIN TRANSACTION;
DELETE FROM "bricktracker_sets" -- Delete child records first (those with foreign keys referencing bricktracker_sets)
WHERE "bricktracker_sets"."id" IS NOT DISTINCT FROM '{{ id }}'; DELETE FROM "bricktracker_parts"
WHERE "bricktracker_parts"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_owners"
WHERE "bricktracker_set_owners"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_statuses"
WHERE "bricktracker_set_statuses"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_tags"
WHERE "bricktracker_set_tags"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_minifigures" DELETE FROM "bricktracker_minifigures"
WHERE "bricktracker_minifigures"."id" IS NOT DISTINCT FROM '{{ id }}'; WHERE "bricktracker_minifigures"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_parts" DELETE FROM "bricktracker_set_tags"
WHERE "bricktracker_parts"."id" IS NOT DISTINCT FROM '{{ id }}'; WHERE "bricktracker_set_tags"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_statuses"
WHERE "bricktracker_set_statuses"."id" IS NOT DISTINCT FROM '{{ id }}';
DELETE FROM "bricktracker_set_owners"
WHERE "bricktracker_set_owners"."id" IS NOT DISTINCT FROM '{{ id }}';
-- Delete the parent record last
DELETE FROM "bricktracker_sets"
WHERE "bricktracker_sets"."id" IS NOT DISTINCT FROM '{{ id }}';
COMMIT; COMMIT;

View File

@@ -1 +1,8 @@
{% extends 'set/base/full.sql' %} {% extends 'set/base/full.sql' %}
{% block where %}
{# Free-text set search: case-insensitive match on the set name or the
   set number. #}
{# NOTE(review): search_query is interpolated directly into the SQL
   string; quotes and LIKE wildcards ('%', '_') in user input are not
   escaped here -- confirm the caller sanitizes the value before
   rendering this template. #}
{% if search_query %}
WHERE (LOWER("rebrickable_sets"."name") LIKE LOWER('%{{ search_query }}%')
OR LOWER("rebrickable_sets"."set") LIKE LOWER('%{{ search_query }}%'))
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,81 @@
{% extends 'set/base/full.sql' %}
{# Filtered set listing: overrides the where-block of the full set query
   with search, theme, year, storage, purchase-location, status, owner,
   tag and duplicate filters. "WHERE 1=1" lets every filter be appended
   uniformly with "AND". #}
{# NOTE(review): search_query, storage_filter and
   purchase_location_filter are interpolated directly into SQL string
   literals, and status/owner/tag filters are turned into dynamic column
   names -- none of these can be bound parameters, so confirm all values
   are validated/whitelisted by the caller before rendering. #}
{% block where %}
WHERE 1=1
{% if search_query %}
AND (LOWER("rebrickable_sets"."name") LIKE LOWER('%{{ search_query }}%')
OR LOWER("rebrickable_sets"."set") LIKE LOWER('%{{ search_query }}%'))
{% endif %}
{% if theme_filter %}
AND "rebrickable_sets"."theme_id" = {{ theme_filter }}
{% endif %}
{% if year_filter %}
AND "rebrickable_sets"."year" = {{ year_filter }}
{% endif %}
{% if storage_filter %}
AND "bricktracker_sets"."storage" = '{{ storage_filter }}'
{% endif %}
{% if purchase_location_filter %}
AND "bricktracker_sets"."purchase_location" = '{{ purchase_location_filter }}'
{% endif %}
{% if status_filter %}
{# has-missing/has-damaged read problem_join totals; this relies on the
   parent template pre-aggregating problems per set id -- presumably one
   row per set, verify in set/base/full.sql. #}
{% if status_filter == 'has-missing' %}
AND IFNULL("problem_join"."total_missing", 0) > 0
{% elif status_filter == '-has-missing' %}
AND IFNULL("problem_join"."total_missing", 0) = 0
{% elif status_filter == 'has-damaged' %}
AND IFNULL("problem_join"."total_damaged", 0) > 0
{% elif status_filter == '-has-damaged' %}
AND IFNULL("problem_join"."total_damaged", 0) = 0
{% elif status_filter == 'has-storage' %}
AND "bricktracker_sets"."storage" IS NOT NULL AND "bricktracker_sets"."storage" != ''
{% elif status_filter == '-has-storage' %}
AND ("bricktracker_sets"."storage" IS NULL OR "bricktracker_sets"."storage" = '')
{% elif status_filter.startswith('status-') %}
{# 'status-<key>' is mapped to the dynamic column 'status_<key>' --
   assumes keys never contain further '-' with a different meaning. #}
AND EXISTS (
SELECT 1 FROM "bricktracker_set_statuses"
WHERE "bricktracker_set_statuses"."id" = "bricktracker_sets"."id"
AND "bricktracker_set_statuses"."{{ status_filter.replace('-', '_') }}" = 1
)
{% elif status_filter.startswith('-status-') %}
{# Leading '-' negates; it is stripped before the column-name mapping. #}
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_set_statuses"
WHERE "bricktracker_set_statuses"."id" = "bricktracker_sets"."id"
AND "bricktracker_set_statuses"."{{ status_filter[1:].replace('-', '_') }}" = 1
)
{% endif %}
{% endif %}
{% if owner_filter %}
{% if owner_filter.startswith('owner-') %}
AND EXISTS (
SELECT 1 FROM "bricktracker_set_owners"
WHERE "bricktracker_set_owners"."id" = "bricktracker_sets"."id"
AND "bricktracker_set_owners"."{{ owner_filter.replace('-', '_') }}" = 1
)
{% endif %}
{% endif %}
{% if tag_filter %}
{% if tag_filter.startswith('tag-') %}
AND EXISTS (
SELECT 1 FROM "bricktracker_set_tags"
WHERE "bricktracker_set_tags"."id" = "bricktracker_sets"."id"
AND "bricktracker_set_tags"."{{ tag_filter.replace('-', '_') }}" = 1
)
{% endif %}
{% endif %}
{% if duplicate_filter %}
{# Keep only sets owned more than once (same set number, several
   instances). #}
AND (
SELECT COUNT(*)
FROM "bricktracker_sets" as "duplicate_check"
WHERE "duplicate_check"."set" = "bricktracker_sets"."set"
) > 1
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,178 @@
-- Grouped set listing: one row per distinct set NUMBER, aggregating all
-- owned instances of that set (counts, problems, storages, purchase
-- info, owner/tag/status flags). Instance-level filters use EXISTS
-- subqueries so a group is kept if ANY of its instances matches;
-- aggregate filters (missing/damaged totals, duplicates) live in HAVING.
-- NOTE(review): search_query/storage_filter/purchase_location_filter and
-- the status filter column names are interpolated raw into the SQL --
-- confirm callers validate these values before rendering.
SELECT
(SELECT MIN("id") FROM "bricktracker_sets" WHERE "set" = "rebrickable_sets"."set") AS "id",
"rebrickable_sets"."set",
"rebrickable_sets"."number",
"rebrickable_sets"."version",
"rebrickable_sets"."name",
"rebrickable_sets"."year",
"rebrickable_sets"."theme_id",
"rebrickable_sets"."number_of_parts",
"rebrickable_sets"."image",
"rebrickable_sets"."url",
COUNT("bricktracker_sets"."id") AS "instance_count",
IFNULL(SUM("problem_join"."total_missing"), 0) AS "total_missing",
IFNULL(SUM("problem_join"."total_damaged"), 0) AS "total_damaged",
IFNULL(MAX("minifigures_join"."total"), 0) AS "total_minifigures",
-- Keep one representative instance for display purposes
GROUP_CONCAT("bricktracker_sets"."id", '|') AS "instance_ids",
-- SQLite forbids a custom separator with GROUP_CONCAT(DISTINCT ...), so
-- the default ',' is swapped to '|' afterwards. Caveat: a storage or
-- purchase-location value that itself contains ',' would be split --
-- presumably these values never contain commas, verify.
REPLACE(GROUP_CONCAT(DISTINCT "bricktracker_sets"."storage"), ',', '|') AS "storage",
MIN("bricktracker_sets"."purchase_date") AS "purchase_date",
MAX("bricktracker_sets"."purchase_date") AS "purchase_date_max",
REPLACE(GROUP_CONCAT(DISTINCT "bricktracker_sets"."purchase_location"), ',', '|') AS "purchase_location",
ROUND(AVG("bricktracker_sets"."purchase_price"), 1) AS "purchase_price"
{% block owners %}
{% if owners_dict %}
{% for column, uuid in owners_dict.items() %}
, MAX("bricktracker_set_owners"."{{ column }}") AS "{{ column }}"
{% endfor %}
{% endif %}
{% endblock %}
{% block tags %}
{% if tags_dict %}
{% for column, uuid in tags_dict.items() %}
, MAX("bricktracker_set_tags"."{{ column }}") AS "{{ column }}"
{% endfor %}
{% endif %}
{% endblock %}
{% block statuses %}
{% if statuses_dict %}
{% for column, uuid in statuses_dict.items() %}
, MAX("bricktracker_set_statuses"."{{ column }}") AS "{{ column }}"
, IFNULL(SUM("bricktracker_set_statuses"."{{ column }}"), 0) AS "{{ column }}_count"
{% endfor %}
{% endif %}
{% endblock %}
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
-- LEFT JOIN + SELECT to avoid messing the total
LEFT JOIN (
SELECT
"bricktracker_parts"."id",
SUM("bricktracker_parts"."missing") AS "total_missing",
SUM("bricktracker_parts"."damaged") AS "total_damaged"
FROM "bricktracker_parts"
GROUP BY "bricktracker_parts"."id"
) "problem_join"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "problem_join"."id"
-- LEFT JOIN + SELECT to avoid messing the total
LEFT JOIN (
SELECT
"bricktracker_minifigures"."id",
SUM("bricktracker_minifigures"."quantity") AS "total"
FROM "bricktracker_minifigures"
GROUP BY "bricktracker_minifigures"."id"
) "minifigures_join"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "minifigures_join"."id"
{% if owners_dict %}
LEFT JOIN "bricktracker_set_owners"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_owners"."id"
{% endif %}
{% if statuses_dict %}
LEFT JOIN "bricktracker_set_statuses"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_statuses"."id"
{% endif %}
{% if tags_dict %}
LEFT JOIN "bricktracker_set_tags"
ON "bricktracker_sets"."id" IS NOT DISTINCT FROM "bricktracker_set_tags"."id"
{% endif %}
{% block where %}
WHERE 1=1
{% if search_query %}
AND (LOWER("rebrickable_sets"."name") LIKE LOWER('%{{ search_query }}%')
OR LOWER("rebrickable_sets"."set") LIKE LOWER('%{{ search_query }}%'))
{% endif %}
{% if theme_filter %}
AND "rebrickable_sets"."theme_id" = {{ theme_filter }}
{% endif %}
{% if year_filter %}
AND "rebrickable_sets"."year" = {{ year_filter }}
{% endif %}
-- Instance-level filters: EXISTS keeps the whole group when any one
-- instance of the set matches the condition.
{% if storage_filter %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."storage" = '{{ storage_filter }}'
)
{% endif %}
{% if purchase_location_filter %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."purchase_location" = '{{ purchase_location_filter }}'
)
{% endif %}
{% if status_filter %}
{% if status_filter == 'has-storage' %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."storage" IS NOT NULL AND bs_filter."storage" != ''
)
{% elif status_filter == '-has-storage' %}
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."storage" IS NOT NULL AND bs_filter."storage" != ''
)
{% elif status_filter.startswith('status-') %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_set_statuses" ON bs_filter."id" = "bricktracker_set_statuses"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_set_statuses"."{{ status_filter.replace('-', '_') }}" = 1
)
{% elif status_filter.startswith('-status-') %}
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_set_statuses" ON bs_filter."id" = "bricktracker_set_statuses"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_set_statuses"."{{ status_filter[1:].replace('-', '_') }}" = 1
)
{% endif %}
{% endif %}
{% endblock %}
GROUP BY "rebrickable_sets"."set"
-- Aggregate-level filters must run after GROUP BY, hence HAVING.
{% if status_filter or duplicate_filter %}
HAVING 1=1
{% if status_filter %}
{% if status_filter == 'has-missing' %}
AND IFNULL(SUM("problem_join"."total_missing"), 0) > 0
{% elif status_filter == '-has-missing' %}
AND IFNULL(SUM("problem_join"."total_missing"), 0) = 0
{% elif status_filter == 'has-damaged' %}
AND IFNULL(SUM("problem_join"."total_damaged"), 0) > 0
{% elif status_filter == '-has-damaged' %}
AND IFNULL(SUM("problem_join"."total_damaged"), 0) = 0
{% endif %}
{% endif %}
{% if duplicate_filter %}
AND COUNT("bricktracker_sets"."id") > 1
{% endif %}
{% endif %}
{% if order %}
ORDER BY {{ order }}
{% endif %}
{% if limit %}
LIMIT {{ limit }}
{% endif %}
{% if offset %}
OFFSET {{ offset }}
{% endif %}

View File

@@ -5,7 +5,7 @@ WHERE "bricktracker_sets"."id" IN (
SELECT "bricktracker_parts"."id" SELECT "bricktracker_parts"."id"
FROM "bricktracker_parts" FROM "bricktracker_parts"
WHERE "bricktracker_parts"."figure" IS NOT DISTINCT FROM :figure WHERE "bricktracker_parts"."figure" IS NOT DISTINCT FROM :figure
AND "bricktracker_parts"."missing" > 0 AND "bricktracker_parts"."damaged" > 0
GROUP BY "bricktracker_parts"."id" GROUP BY "bricktracker_parts"."id"
) )
{% endblock %} {% endblock %}

View File

@@ -0,0 +1,87 @@
-- Distinct theme ids present in the owned collection, used to populate
-- the theme filter dropdown. The WHERE block mirrors the set-list
-- filters so the dropdown only offers themes that survive them.
-- NOTE(review): filter values (search_query, storage_filter, ...) are
-- interpolated into the SQL text by Jinja rather than bound as
-- parameters; assumes they are sanitized upstream -- verify callers.
SELECT DISTINCT "rebrickable_sets"."theme_id"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% block where %}
-- 1=1 lets every optional condition below be appended with a leading AND.
WHERE 1=1
{% if search_query %}
-- Case-insensitive substring match on set name or set number.
AND (LOWER("rebrickable_sets"."name") LIKE LOWER('%{{ search_query }}%')
OR LOWER("rebrickable_sets"."set") LIKE LOWER('%{{ search_query }}%'))
{% endif %}
{% if storage_filter %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."storage" = '{{ storage_filter }}'
)
{% endif %}
{% if purchase_location_filter %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."purchase_location" = '{{ purchase_location_filter }}'
)
{% endif %}
{% if status_filter %}
-- 'has-storage' / '-has-storage': any copy of the set has (or none has)
-- a non-empty storage value.
{% if status_filter == 'has-storage' %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."storage" IS NOT NULL AND bs_filter."storage" != ''
)
{% elif status_filter == '-has-storage' %}
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND bs_filter."storage" IS NOT NULL AND bs_filter."storage" != ''
)
{% elif status_filter.startswith('status-') %}
-- Dynamic column: the 'status-...' slug maps onto a column of
-- "bricktracker_set_statuses" (dashes replaced by underscores).
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_set_statuses" ON bs_filter."id" = "bricktracker_set_statuses"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_set_statuses"."{{ status_filter.replace('-', '_') }}" = 1
)
{% elif status_filter.startswith('-status-') %}
-- Leading '-' negates the status filter; [1:] strips it before mapping.
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_set_statuses" ON bs_filter."id" = "bricktracker_set_statuses"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_set_statuses"."{{ status_filter[1:].replace('-', '_') }}" = 1
)
-- 'has-missing' / 'has-damaged' (and negations): any copy of the set
-- has at least one part flagged missing/damaged.
{% elif status_filter == 'has-missing' %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_parts" ON bs_filter."id" = "bricktracker_parts"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_parts"."missing" > 0
)
{% elif status_filter == '-has-missing' %}
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_parts" ON bs_filter."id" = "bricktracker_parts"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_parts"."missing" > 0
)
{% elif status_filter == 'has-damaged' %}
AND EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_parts" ON bs_filter."id" = "bricktracker_parts"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_parts"."damaged" > 0
)
{% elif status_filter == '-has-damaged' %}
AND NOT EXISTS (
SELECT 1 FROM "bricktracker_sets" bs_filter
JOIN "bricktracker_parts" ON bs_filter."id" = "bricktracker_parts"."id"
WHERE bs_filter."set" = "rebrickable_sets"."set"
AND "bricktracker_parts"."damaged" > 0
)
{% endif %}
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,13 @@
-- Distinct release years present in the owned collection, used to
-- populate the year filter dropdown. Only the free-text search is
-- applied here; the other set-list filters do not narrow this list.
SELECT DISTINCT "rebrickable_sets"."year"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets"
ON "bricktracker_sets"."set" IS NOT DISTINCT FROM "rebrickable_sets"."set"
{% block where %}
-- 1=1 lets the optional condition be appended with a leading AND.
WHERE 1=1
{% if search_query %}
-- Case-insensitive substring match on set name or set number.
-- NOTE(review): search_query is interpolated by Jinja, not bound as a
-- parameter; assumes it is sanitized upstream -- verify.
AND (LOWER("rebrickable_sets"."name") LIKE LOWER('%{{ search_query }}%')
OR LOWER("rebrickable_sets"."set") LIKE LOWER('%{{ search_query }}%'))
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,83 @@
-- Statistics Overview Query (Optimized with CTEs)
-- Provides comprehensive statistics for BrickTracker dashboard
-- Consolidates what were separate subqueries into single-row CTEs so
-- each base table is scanned once (intended as a performance
-- improvement for dashboard loading; not benchmarked here).
WITH
-- Set statistics aggregation
set_stats AS (
SELECT
COUNT(*) AS total_sets,
COUNT(DISTINCT "set") AS unique_sets,
COUNT(CASE WHEN "purchase_price" IS NOT NULL THEN 1 END) AS sets_with_price,
ROUND(SUM("purchase_price"), 2) AS total_cost,
ROUND(AVG("purchase_price"), 2) AS average_cost,
ROUND(MIN("purchase_price"), 2) AS minimum_cost,
ROUND(MAX("purchase_price"), 2) AS maximum_cost,
COUNT(DISTINCT CASE WHEN "storage" IS NOT NULL THEN "storage" END) AS storage_locations_used,
COUNT(DISTINCT CASE WHEN "purchase_location" IS NOT NULL THEN "purchase_location" END) AS purchase_locations_used,
COUNT(CASE WHEN "storage" IS NOT NULL THEN 1 END) AS sets_with_storage,
COUNT(CASE WHEN "purchase_location" IS NOT NULL THEN 1 END) AS sets_with_purchase_location
FROM "bricktracker_sets"
),
-- Part statistics aggregation
part_stats AS (
SELECT
COUNT(*) AS total_part_instances,
SUM("quantity") AS total_parts_count,
COUNT(DISTINCT "part") AS unique_parts,
SUM("missing") AS total_missing_parts,
SUM("damaged") AS total_damaged_parts
FROM "bricktracker_parts"
),
-- Minifigure statistics aggregation
minifig_stats AS (
SELECT
COUNT(*) AS total_minifigure_instances,
SUM("quantity") AS total_minifigures_count,
COUNT(DISTINCT "figure") AS unique_minifigures
FROM "bricktracker_minifigures"
),
-- Rebrickable sets count (for sets we actually own)
rebrickable_stats AS (
SELECT COUNT(*) AS unique_rebrickable_sets
FROM "rebrickable_sets"
WHERE "set" IN (SELECT DISTINCT "set" FROM "bricktracker_sets")
)
-- Final select combining all statistics.
-- Each CTE yields exactly one row, so this comma (cross) join also
-- produces exactly one row.
SELECT
-- Basic counts
set_stats.total_sets,
set_stats.unique_sets,
rebrickable_stats.unique_rebrickable_sets,
-- Parts statistics
part_stats.total_part_instances,
part_stats.total_parts_count,
part_stats.unique_parts,
part_stats.total_missing_parts,
part_stats.total_damaged_parts,
-- Minifigures statistics
minifig_stats.total_minifigure_instances,
minifig_stats.total_minifigures_count,
minifig_stats.unique_minifigures,
-- Financial statistics
set_stats.sets_with_price,
set_stats.total_cost,
set_stats.average_cost,
set_stats.minimum_cost,
set_stats.maximum_cost,
-- Storage and location statistics
set_stats.storage_locations_used,
set_stats.purchase_locations_used,
set_stats.sets_with_storage,
set_stats.sets_with_purchase_location
FROM set_stats, part_stats, minifig_stats, rebrickable_stats

View File

@@ -0,0 +1,45 @@
-- Purchase Location Statistics
-- Shows statistics grouped by purchase location.
-- Sets without a purchase location are excluded (WHERE clause), as are
-- sets with no matching rebrickable_sets row (INNER JOIN).
SELECT
"bricktracker_sets"."purchase_location" AS "location_id",
"bricktracker_metadata_purchase_locations"."name" AS "location_name",
COUNT("bricktracker_sets"."id") AS "set_count",
COUNT(DISTINCT "bricktracker_sets"."set") AS "unique_set_count",
SUM("rebrickable_sets"."number_of_parts") AS "total_parts",
ROUND(AVG("rebrickable_sets"."number_of_parts"), 0) AS "avg_parts_per_set",
-- Financial statistics per purchase location
COUNT(CASE WHEN "bricktracker_sets"."purchase_price" IS NOT NULL THEN 1 END) AS "sets_with_price",
ROUND(SUM("bricktracker_sets"."purchase_price"), 2) AS "total_spent",
ROUND(AVG("bricktracker_sets"."purchase_price"), 2) AS "avg_price",
ROUND(MIN("bricktracker_sets"."purchase_price"), 2) AS "min_price",
ROUND(MAX("bricktracker_sets"."purchase_price"), 2) AS "max_price",
-- Date range statistics
MIN("bricktracker_sets"."purchase_date") AS "first_purchase",
MAX("bricktracker_sets"."purchase_date") AS "latest_purchase",
-- Problem statistics per purchase location
COALESCE(SUM("problem_stats"."missing_parts"), 0) AS "missing_parts",
COALESCE(SUM("problem_stats"."damaged_parts"), 0) AS "damaged_parts",
-- Minifigure statistics per purchase location
COALESCE(SUM("minifigure_stats"."minifigure_count"), 0) AS "total_minifigures"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets" ON "bricktracker_sets"."set" = "rebrickable_sets"."set"
-- Location name is optional metadata, hence the LEFT JOIN
LEFT JOIN "bricktracker_metadata_purchase_locations" ON "bricktracker_sets"."purchase_location" = "bricktracker_metadata_purchase_locations"."id"
-- Pre-aggregate part problems per bricktracker_sets id so joining them
-- does not multiply the outer rows per part
LEFT JOIN (
SELECT
"bricktracker_parts"."id",
SUM("bricktracker_parts"."missing") AS "missing_parts",
SUM("bricktracker_parts"."damaged") AS "damaged_parts"
FROM "bricktracker_parts"
GROUP BY "bricktracker_parts"."id"
) "problem_stats" ON "bricktracker_sets"."id" = "problem_stats"."id"
-- Pre-aggregate minifigure quantities per bricktracker_sets id
LEFT JOIN (
SELECT
"bricktracker_minifigures"."id",
SUM("bricktracker_minifigures"."quantity") AS "minifigure_count"
FROM "bricktracker_minifigures"
GROUP BY "bricktracker_minifigures"."id"
) "minifigure_stats" ON "bricktracker_sets"."id" = "minifigure_stats"."id"
WHERE "bricktracker_sets"."purchase_location" IS NOT NULL
GROUP BY "bricktracker_sets"."purchase_location", "bricktracker_metadata_purchase_locations"."name"
ORDER BY "set_count" DESC, "location_name" ASC

View File

@@ -0,0 +1,49 @@
-- Purchases by Year Statistics
-- Shows statistics grouped by purchase year (when you bought the sets).
-- purchase_date is stored as a unix epoch; datetime(..., 'unixepoch')
-- plus strftime('%Y', ...) converts it to a calendar year.
-- Sets without a purchase date are excluded (WHERE clause).
SELECT
strftime('%Y', datetime("bricktracker_sets"."purchase_date", 'unixepoch')) AS "purchase_year",
COUNT("bricktracker_sets"."id") AS "total_sets",
COUNT(DISTINCT "bricktracker_sets"."set") AS "unique_sets",
SUM("rebrickable_sets"."number_of_parts") AS "total_parts",
ROUND(AVG("rebrickable_sets"."number_of_parts"), 0) AS "avg_parts_per_set",
-- Financial statistics per purchase year
COUNT(CASE WHEN "bricktracker_sets"."purchase_price" IS NOT NULL THEN 1 END) AS "sets_with_price",
ROUND(SUM("bricktracker_sets"."purchase_price"), 2) AS "total_spent",
ROUND(AVG("bricktracker_sets"."purchase_price"), 2) AS "avg_price_per_set",
ROUND(MIN("bricktracker_sets"."purchase_price"), 2) AS "min_price",
ROUND(MAX("bricktracker_sets"."purchase_price"), 2) AS "max_price",
-- Release year statistics for sets purchased in this year
MIN("rebrickable_sets"."year") AS "oldest_set_year",
MAX("rebrickable_sets"."year") AS "newest_set_year",
ROUND(AVG("rebrickable_sets"."year"), 0) AS "avg_set_release_year",
-- Problem statistics per purchase year
COALESCE(SUM("problem_stats"."missing_parts"), 0) AS "missing_parts",
COALESCE(SUM("problem_stats"."damaged_parts"), 0) AS "damaged_parts",
-- Minifigure statistics per purchase year
COALESCE(SUM("minifigure_stats"."minifigure_count"), 0) AS "total_minifigures",
-- Diversity statistics per purchase year
COUNT(DISTINCT "rebrickable_sets"."theme_id") AS "unique_themes",
COUNT(DISTINCT "bricktracker_sets"."purchase_location") AS "unique_purchase_locations",
-- Monthly statistics within the year
COUNT(DISTINCT strftime('%m', datetime("bricktracker_sets"."purchase_date", 'unixepoch'))) AS "months_with_purchases"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets" ON "bricktracker_sets"."set" = "rebrickable_sets"."set"
-- Pre-aggregate part problems per bricktracker_sets id so joining them
-- does not multiply the outer rows per part
LEFT JOIN (
SELECT
"bricktracker_parts"."id",
SUM("bricktracker_parts"."missing") AS "missing_parts",
SUM("bricktracker_parts"."damaged") AS "damaged_parts"
FROM "bricktracker_parts"
GROUP BY "bricktracker_parts"."id"
) "problem_stats" ON "bricktracker_sets"."id" = "problem_stats"."id"
-- Pre-aggregate minifigure quantities per bricktracker_sets id
LEFT JOIN (
SELECT
"bricktracker_minifigures"."id",
SUM("bricktracker_minifigures"."quantity") AS "minifigure_count"
FROM "bricktracker_minifigures"
GROUP BY "bricktracker_minifigures"."id"
) "minifigure_stats" ON "bricktracker_sets"."id" = "minifigure_stats"."id"
WHERE "bricktracker_sets"."purchase_date" IS NOT NULL
GROUP BY strftime('%Y', datetime("bricktracker_sets"."purchase_date", 'unixepoch'))
ORDER BY "purchase_year" DESC

View File

@@ -0,0 +1,44 @@
-- Sets by Year Statistics
-- Shows statistics grouped by LEGO set release year
-- (rebrickable_sets.year). Sets with no known release year are
-- excluded (WHERE clause), as are sets with no matching
-- rebrickable_sets row (INNER JOIN).
SELECT
"rebrickable_sets"."year",
COUNT("bricktracker_sets"."id") AS "total_sets",
COUNT(DISTINCT "bricktracker_sets"."set") AS "unique_sets",
SUM("rebrickable_sets"."number_of_parts") AS "total_parts",
ROUND(AVG("rebrickable_sets"."number_of_parts"), 0) AS "avg_parts_per_set",
MIN("rebrickable_sets"."number_of_parts") AS "min_parts",
MAX("rebrickable_sets"."number_of_parts") AS "max_parts",
-- Financial statistics per year (release year)
COUNT(CASE WHEN "bricktracker_sets"."purchase_price" IS NOT NULL THEN 1 END) AS "sets_with_price",
ROUND(SUM("bricktracker_sets"."purchase_price"), 2) AS "total_spent",
ROUND(AVG("bricktracker_sets"."purchase_price"), 2) AS "avg_price_per_set",
ROUND(MIN("bricktracker_sets"."purchase_price"), 2) AS "min_price",
ROUND(MAX("bricktracker_sets"."purchase_price"), 2) AS "max_price",
-- Problem statistics per year
COALESCE(SUM("problem_stats"."missing_parts"), 0) AS "missing_parts",
COALESCE(SUM("problem_stats"."damaged_parts"), 0) AS "damaged_parts",
-- Minifigure statistics per year
COALESCE(SUM("minifigure_stats"."minifigure_count"), 0) AS "total_minifigures",
-- Theme diversity per year
COUNT(DISTINCT "rebrickable_sets"."theme_id") AS "unique_themes"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets" ON "bricktracker_sets"."set" = "rebrickable_sets"."set"
-- Pre-aggregate part problems per bricktracker_sets id so joining them
-- does not multiply the outer rows per part
LEFT JOIN (
SELECT
"bricktracker_parts"."id",
SUM("bricktracker_parts"."missing") AS "missing_parts",
SUM("bricktracker_parts"."damaged") AS "damaged_parts"
FROM "bricktracker_parts"
GROUP BY "bricktracker_parts"."id"
) "problem_stats" ON "bricktracker_sets"."id" = "problem_stats"."id"
-- Pre-aggregate minifigure quantities per bricktracker_sets id
LEFT JOIN (
SELECT
"bricktracker_minifigures"."id",
SUM("bricktracker_minifigures"."quantity") AS "minifigure_count"
FROM "bricktracker_minifigures"
GROUP BY "bricktracker_minifigures"."id"
) "minifigure_stats" ON "bricktracker_sets"."id" = "minifigure_stats"."id"
WHERE "rebrickable_sets"."year" IS NOT NULL
GROUP BY "rebrickable_sets"."year"
ORDER BY "rebrickable_sets"."year" DESC

View File

@@ -0,0 +1,40 @@
-- Storage Location Statistics
-- Shows statistics grouped by storage location.
-- Sets without a storage location are excluded (WHERE clause), as are
-- sets with no matching rebrickable_sets row (INNER JOIN).
SELECT
"bricktracker_sets"."storage" AS "storage_id",
"bricktracker_metadata_storages"."name" AS "storage_name",
COUNT("bricktracker_sets"."id") AS "set_count",
COUNT(DISTINCT "bricktracker_sets"."set") AS "unique_set_count",
SUM("rebrickable_sets"."number_of_parts") AS "total_parts",
ROUND(AVG("rebrickable_sets"."number_of_parts"), 0) AS "avg_parts_per_set",
-- Financial statistics per storage
COUNT(CASE WHEN "bricktracker_sets"."purchase_price" IS NOT NULL THEN 1 END) AS "sets_with_price",
ROUND(SUM("bricktracker_sets"."purchase_price"), 2) AS "total_value",
ROUND(AVG("bricktracker_sets"."purchase_price"), 2) AS "avg_price",
-- Problem statistics per storage
COALESCE(SUM("problem_stats"."missing_parts"), 0) AS "missing_parts",
COALESCE(SUM("problem_stats"."damaged_parts"), 0) AS "damaged_parts",
-- Minifigure statistics per storage
COALESCE(SUM("minifigure_stats"."minifigure_count"), 0) AS "total_minifigures"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets" ON "bricktracker_sets"."set" = "rebrickable_sets"."set"
-- Storage name is optional metadata, hence the LEFT JOIN
LEFT JOIN "bricktracker_metadata_storages" ON "bricktracker_sets"."storage" = "bricktracker_metadata_storages"."id"
-- Pre-aggregate part problems per bricktracker_sets id so joining them
-- does not multiply the outer rows per part
LEFT JOIN (
SELECT
"bricktracker_parts"."id",
SUM("bricktracker_parts"."missing") AS "missing_parts",
SUM("bricktracker_parts"."damaged") AS "damaged_parts"
FROM "bricktracker_parts"
GROUP BY "bricktracker_parts"."id"
) "problem_stats" ON "bricktracker_sets"."id" = "problem_stats"."id"
-- Pre-aggregate minifigure quantities per bricktracker_sets id
LEFT JOIN (
SELECT
"bricktracker_minifigures"."id",
SUM("bricktracker_minifigures"."quantity") AS "minifigure_count"
FROM "bricktracker_minifigures"
GROUP BY "bricktracker_minifigures"."id"
) "minifigure_stats" ON "bricktracker_sets"."id" = "minifigure_stats"."id"
WHERE "bricktracker_sets"."storage" IS NOT NULL
GROUP BY "bricktracker_sets"."storage", "bricktracker_metadata_storages"."name"
ORDER BY "set_count" DESC, "storage_name" ASC

View File

@@ -0,0 +1,39 @@
-- Theme Distribution Statistics
-- Shows statistics grouped by theme.
-- No WHERE filter: every owned set with a matching rebrickable_sets
-- row contributes. Only theme_id is returned; it is not resolved to a
-- theme name in this query.
SELECT
"rebrickable_sets"."theme_id",
COUNT("bricktracker_sets"."id") AS "set_count",
COUNT(DISTINCT "bricktracker_sets"."set") AS "unique_set_count",
SUM("rebrickable_sets"."number_of_parts") AS "total_parts",
ROUND(AVG("rebrickable_sets"."number_of_parts"), 0) AS "avg_parts_per_set",
MIN("rebrickable_sets"."year") AS "earliest_year",
MAX("rebrickable_sets"."year") AS "latest_year",
-- Financial statistics per theme
COUNT(CASE WHEN "bricktracker_sets"."purchase_price" IS NOT NULL THEN 1 END) AS "sets_with_price",
ROUND(SUM("bricktracker_sets"."purchase_price"), 2) AS "total_spent",
ROUND(AVG("bricktracker_sets"."purchase_price"), 2) AS "avg_price",
-- Problem statistics per theme
COALESCE(SUM("problem_stats"."missing_parts"), 0) AS "missing_parts",
COALESCE(SUM("problem_stats"."damaged_parts"), 0) AS "damaged_parts",
-- Minifigure statistics per theme
COALESCE(SUM("minifigure_stats"."minifigure_count"), 0) AS "total_minifigures"
FROM "bricktracker_sets"
INNER JOIN "rebrickable_sets" ON "bricktracker_sets"."set" = "rebrickable_sets"."set"
-- Pre-aggregate part problems per bricktracker_sets id so joining them
-- does not multiply the outer rows per part
LEFT JOIN (
SELECT
"bricktracker_parts"."id",
SUM("bricktracker_parts"."missing") AS "missing_parts",
SUM("bricktracker_parts"."damaged") AS "damaged_parts"
FROM "bricktracker_parts"
GROUP BY "bricktracker_parts"."id"
) "problem_stats" ON "bricktracker_sets"."id" = "problem_stats"."id"
-- Pre-aggregate minifigure quantities per bricktracker_sets id
LEFT JOIN (
SELECT
"bricktracker_minifigures"."id",
SUM("bricktracker_minifigures"."quantity") AS "minifigure_count"
FROM "bricktracker_minifigures"
GROUP BY "bricktracker_minifigures"."id"
) "minifigure_stats" ON "bricktracker_sets"."id" = "minifigure_stats"."id"
GROUP BY "rebrickable_sets"."theme_id"
ORDER BY "set_count" DESC, "rebrickable_sets"."theme_id" ASC

132
bricktracker/statistics.py Normal file
View File

@@ -0,0 +1,132 @@
"""
Statistics module for BrickTracker
Provides statistics and analytics functionality
"""
import logging
from typing import Any
from .sql import BrickSQL
from .theme_list import BrickThemeList
logger = logging.getLogger(__name__)
class BrickStatistics:
    """Collection analytics backed by the statistics/* SQL templates.

    Each getter runs one pre-written query through BrickSQL and returns
    plain dicts/lists so templates can consume the results directly.
    """

    def __init__(self):
        # Single database handle reused by every query below.
        self.sql = BrickSQL()

    def get_overview(self) -> dict[str, Any]:
        """Return the one-row dashboard overview, or {} when empty."""
        row = self.sql.fetchone('statistics/overview')
        return dict(row) if row else {}

    def get_theme_statistics(self) -> list[dict[str, Any]]:
        """Return per-theme statistics, each row enriched with its theme name."""
        theme_list = BrickThemeList()
        statistics: list[dict[str, Any]] = []
        for row in self.sql.fetchall('statistics/themes'):
            stat = dict(row)
            theme = theme_list.get(stat['theme_id'])
            # Fall back to a generic label when the id is unknown locally.
            stat['theme_name'] = theme.name if theme else f"Theme {stat['theme_id']}"
            statistics.append(stat)
        return statistics

    def get_storage_statistics(self) -> list[dict[str, Any]]:
        """Return statistics grouped by storage location."""
        return [dict(row) for row in self.sql.fetchall('statistics/storage')]

    def get_purchase_location_statistics(self) -> list[dict[str, Any]]:
        """Return statistics grouped by purchase location."""
        return [dict(row) for row in self.sql.fetchall('statistics/purchase_locations')]

    def get_financial_summary(self) -> dict[str, Any]:
        """Return cost aggregates plus the share of sets that have a price."""
        overview = self.get_overview()
        priced = overview.get('sets_with_price') or 0
        owned = overview.get('total_sets') or 0
        return {
            'total_cost': overview.get('total_cost') or 0,
            'average_cost': overview.get('average_cost') or 0,
            'minimum_cost': overview.get('minimum_cost') or 0,
            'maximum_cost': overview.get('maximum_cost') or 0,
            'sets_with_price': priced,
            'total_sets': owned,
            # max(..., 1) guards against division by zero on an empty collection.
            'percentage_with_price': round((priced / max(owned, 1)) * 100, 1)
        }

    def get_collection_summary(self) -> dict[str, Any]:
        """Return headline collection counters, defaulting each to 0."""
        overview = self.get_overview()
        keys = (
            'total_sets',
            'unique_sets',
            'total_parts_count',
            'unique_parts',
            'total_minifigures_count',
            'unique_minifigures',
            'total_missing_parts',
            'total_damaged_parts',
            'storage_locations_used',
            'purchase_locations_used',
        )
        return {key: overview.get(key) or 0 for key in keys}

    def get_sets_by_year_statistics(self) -> list[dict[str, Any]]:
        """Return statistics grouped by LEGO set release year."""
        return [dict(row) for row in self.sql.fetchall('statistics/sets_by_year')]

    def get_purchases_by_year_statistics(self) -> list[dict[str, Any]]:
        """Return statistics grouped by purchase year."""
        return [dict(row) for row in self.sql.fetchall('statistics/purchases_by_year')]

    def get_year_summary(self) -> dict[str, Any]:
        """Return year-based highlights (peak years, oldest/newest sets)."""
        sets_by_year = self.get_sets_by_year_statistics()
        purchases_by_year = self.get_purchases_by_year_statistics()

        # Year in which the most sets were added to the collection.
        peak_collection_year = None
        max_sets_in_year = 0
        if sets_by_year:
            busiest = max(sets_by_year, key=lambda row: row.get('total_sets') or 0)
            peak_collection_year = busiest.get('year')
            max_sets_in_year = busiest.get('total_sets') or 0

        # Year with the highest recorded spending; rows without a spend
        # total are skipped rather than treated as zero.
        peak_spending_year = None
        max_spending = 0
        spending_years = [row for row in purchases_by_year if row.get('total_spent')]
        if spending_years:
            costliest = max(spending_years, key=lambda row: row.get('total_spent') or 0)
            peak_spending_year = costliest.get('purchase_year')
            max_spending = costliest.get('total_spent') or 0

        release_years = [row['year'] for row in sets_by_year]
        return {
            'years_represented': len(sets_by_year),
            'years_with_purchases': len(purchases_by_year),
            'peak_collection_year': peak_collection_year,
            'max_sets_in_year': max_sets_in_year,
            'peak_spending_year': peak_spending_year,
            'max_spending': max_spending,
            'oldest_set_year': min(release_years) if release_years else None,
            'newest_set_year': max(release_years) if release_years else None
        }

View File

@@ -0,0 +1,13 @@
"""Custom Jinja2 template filters for BrickTracker."""
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
def replace_query_filter(url, key, value):
    """Return ``url`` with the query parameter ``key`` set to ``value``.

    Any existing values for ``key`` are replaced; all other parameters
    (including blank ones) are preserved in their original order.
    """
    parts = urlparse(url)
    params = parse_qs(parts.query, keep_blank_values=True)
    params[key] = [str(value)]
    # geturl() reassembles the six URL components, equivalent to urlunparse().
    return parts._replace(query=urlencode(params, doseq=True)).geturl()

View File

@@ -1,4 +1,4 @@
from typing import Final from typing import Final
__version__: Final[str] = '1.2.4' __version__: Final[str] = '1.3.0'
__database_version__: Final[int] = 17 __database_version__: Final[int] = 20

View File

@@ -1,9 +1,12 @@
import logging import logging
import os
from flask import Blueprint, request, render_template from flask import Blueprint, request, render_template, current_app, jsonify
from flask_login import login_required from flask_login import login_required
from ...configuration_list import BrickConfigurationList from ...configuration_list import BrickConfigurationList
from ...config_manager import ConfigManager
from ...config import CONFIG
from ..exceptions import exception_handler from ..exceptions import exception_handler
from ...instructions_list import BrickInstructionsList from ...instructions_list import BrickInstructionsList
from ...rebrickable_image import RebrickableImage from ...rebrickable_image import RebrickableImage
@@ -27,6 +30,94 @@ logger = logging.getLogger(__name__)
admin_page = Blueprint('admin', __name__, url_prefix='/admin') admin_page = Blueprint('admin', __name__, url_prefix='/admin')
def get_env_values():
    """Build the admin panel's view of every BK_* configuration variable.

    Returns a 4-tuple of dicts, each keyed by the env variable name
    (``BK_<name>`` for each item in CONFIG):
      - env_values: effective value (.env file > process env > default),
        cast according to the config item's declared type ('c')
      - config_defaults: the (cast) default value for each variable
      - env_explicit_values: True when the variable is explicitly set in
        the .env file or the process environment
      - env_locked_values: True when the variable was injected through
        the Docker environment before .env was loaded (not editable)
    """
    import json
    from pathlib import Path

    env_values = {}
    config_defaults = {}
    env_explicit_values = {}  # Which values are explicitly set
    env_locked_values = {}  # Which values are Docker-provided (locked)

    # Variables present in os.environ before load_env_file() ran; app.py
    # stores them as a JSON list so we can mark them as locked here.
    docker_env_vars = set()
    if '_BK_DOCKER_ENV_VARS' in os.environ:
        try:
            docker_env_vars = set(json.loads(os.environ['_BK_DOCKER_ENV_VARS']))
        except (json.JSONDecodeError, TypeError):
            pass

    # Locate the .env file: data/.env takes priority over the root .env.
    env_file = None
    if Path('data/.env').exists():
        env_file = Path('data/.env')
    elif Path('.env').exists():
        env_file = Path('.env')

    env_from_file = {}
    if env_file:
        with open(env_file, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith('#') and '=' in line:
                    key, value = line.split('=', 1)
                    # Strip surrounding quotes from the value when reading
                    value = value.strip()
                    if value.startswith('"') and value.endswith('"'):
                        value = value[1:-1]
                    elif value.startswith("'") and value.endswith("'"):
                        value = value[1:-1]
                    env_from_file[key] = value

    # Process each config item
    for config_item in CONFIG:
        env_name = f"BK_{config_item['n']}"

        # Store the default value (with casting applied)
        default_value = config_item.get('d', '')
        if 'c' in config_item and default_value is not None:
            cast_type = config_item['c']
            if cast_type == bool and default_value == '':
                default_value = False  # Booleans default to False when unspecified
            elif cast_type == list and isinstance(default_value, str):
                default_value = [item.strip() for item in default_value.split(',') if item.strip()]
            # For int/other types, keep the original default value
        config_defaults[env_name] = default_value

        # A variable is "locked" when it was in os.environ BEFORE .env loaded
        env_locked_values[env_name] = env_name in docker_env_vars

        # Explicitly set in the .env file or the process environment
        env_explicit_values[env_name] = (
            env_name in env_from_file or env_name in os.environ
        )

        # Effective value: .env file > process environment > default.
        # Compared against None (not truthiness) so an explicitly empty
        # value in the .env file is honored instead of falling through.
        value = env_from_file.get(env_name)
        if value is None:
            value = os.environ.get(env_name)

        if value is None:
            value = default_value
        elif 'c' in config_item:
            # Apply the declared cast to the raw string value
            cast_type = config_item['c']
            if cast_type == bool and isinstance(value, str):
                value = value.lower() in ('true', '1', 'yes', 'on')
            elif cast_type == int and value != '':
                try:
                    value = int(value)
                except (ValueError, TypeError):
                    value = config_item.get('d', 0)
            elif cast_type == list and isinstance(value, str):
                value = [item.strip() for item in value.split(',') if item.strip()]

        env_values[env_name] = value

    return env_values, config_defaults, env_explicit_values, env_locked_values
# Admin # Admin
@admin_page.route('/', methods=['GET']) @admin_page.route('/', methods=['GET'])
@login_required @login_required
@@ -102,18 +193,72 @@ def admin() -> str:
open_tag open_tag
) )
open_database = ( # Get configurable default expanded sections
open_image is None and default_expanded_sections = current_app.config.get('ADMIN_DEFAULT_EXPANDED_SECTIONS', [])
open_instructions is None and
open_logout is None and # Helper function to check if section should be expanded
not open_metadata and def should_expand(section_name, url_param):
open_retired is None and # URL parameter takes priority over default config
open_theme is None if url_param is not None:
return url_param
# Check if section is in default expanded list
return section_name in default_expanded_sections
# Apply configurable default expansion logic
open_database = should_expand('database', request.args.get('open_database', None))
open_image = should_expand('image', open_image)
open_instructions = should_expand('instructions', open_instructions)
open_logout = should_expand('authentication', open_logout)
open_retired = should_expand('retired', open_retired)
open_theme = should_expand('theme', open_theme)
# Metadata sub-sections
open_owner = should_expand('owner', open_owner)
open_purchase_location = should_expand('purchase_location', open_purchase_location)
open_status = should_expand('status', open_status)
open_storage = should_expand('storage', open_storage)
open_tag = should_expand('tag', open_tag)
# Recalculate metadata section based on sub-sections or direct config
open_metadata = (
should_expand('metadata', open_metadata) or
open_owner or
open_purchase_location or
open_status or
open_storage or
open_tag
) )
env_values, config_defaults, env_explicit_values, env_locked_values = get_env_values()
# Check .env file location and set warnings
env_file_location = None
env_file_warning = False
env_file_missing = False
if os.path.exists('data/.env'):
env_file_location = 'data/.env'
env_file_warning = False
env_file_missing = False
elif os.path.exists('.env'):
env_file_location = '.env'
env_file_warning = True # Warn: changes won't persist without volume mount
env_file_missing = False
else:
env_file_location = None
env_file_warning = False
env_file_missing = True # Warn: no .env file found
return render_template( return render_template(
'admin.html', 'admin.html',
configuration=BrickConfigurationList.list(), configuration=BrickConfigurationList.list(),
env_values=env_values,
config_defaults=config_defaults,
env_explicit_values=env_explicit_values,
env_locked_values=env_locked_values,
env_file_location=env_file_location,
env_file_warning=env_file_warning,
env_file_missing=env_file_missing,
database_counters=database_counters, database_counters=database_counters,
database_error=request.args.get('database_error'), database_error=request.args.get('database_error'),
database_exception=database_exception, database_exception=database_exception,
@@ -149,3 +294,103 @@ def admin() -> str:
tag_error=request.args.get('tag_error'), tag_error=request.args.get('tag_error'),
theme=BrickThemeList(), theme=BrickThemeList(),
) )
# API Endpoints for Configuration Management
@admin_page.route('/api/config/update', methods=['POST'])
@login_required
@exception_handler(__file__)
def update_config() -> str:
    """Apply live configuration updates posted as JSON.

    Expects a body of the form ``{"updates": {...}}``; delegates to
    ConfigManager and reports per-variable results plus counts.
    Returns 400 on a missing/empty body and 500 on unexpected errors.
    """
    try:
        payload = request.get_json()
        if not payload:
            return jsonify({
                'status': 'error',
                'message': 'No JSON data provided'
            }), 400

        updates = payload.get('updates', {})
        if not updates:
            return jsonify({
                'status': 'error',
                'message': 'No updates provided'
            }), 400

        # Delegate the live update to the configuration manager
        results = ConfigManager().update_config(updates)

        # Partition per-variable outcomes on the manager's message text
        successful_updates = {}
        failed_updates = {}
        for name, outcome in results.items():
            if "successfully" in outcome:
                successful_updates[name] = outcome
            else:
                failed_updates[name] = outcome

        logger.info(f"Configuration update: {len(successful_updates)} successful, {len(failed_updates)} failed")
        if failed_updates:
            logger.warning(f"Failed updates: {failed_updates}")

        return jsonify({
            'status': 'success' if not failed_updates else 'partial',
            'results': results,
            'successful_count': len(successful_updates),
            'failed_count': len(failed_updates)
        })
    except Exception as e:
        logger.error(f"Error updating configuration: {e}")
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@admin_page.route('/api/config/update-static', methods=['POST'])
@login_required
@exception_handler(__file__)
def update_static_config() -> str:
    """Update static configuration variables (requires restart).

    Expects a JSON body of the form ``{"updates": {"BK_...": value}}``
    and persists each variable to the .env file via ConfigManager.
    Returns 400 on a missing/empty body and 500 when any write fails.
    """
    try:
        data = request.get_json()
        if not data:
            return jsonify({
                'status': 'error',
                'message': 'No JSON data provided'
            }), 400

        updates = data.get('updates', {})
        if not updates:
            return jsonify({
                'status': 'error',
                'message': 'No updates provided'
            }), 400

        # Use ConfigManager to update the .env file
        config_manager = ConfigManager()

        # Update each variable in the .env file
        updated_count = 0
        for var_name, value in updates.items():
            try:
                config_manager._update_env_file(var_name, value)
                updated_count += 1
                logger.info(f"Updated static config: {var_name}")
            except Exception as e:
                logger.error(f"Failed to update static config {var_name}: {e}")
                # Bare raise (instead of `raise e`) preserves the original
                # traceback for the outer handler below.
                raise

        logger.info(f"Updated {updated_count} static configuration variables")
        return jsonify({
            'status': 'success',
            'message': f'Successfully updated {updated_count} static configuration variables to .env file',
            'updated_count': updated_count
        })
    except Exception as e:
        logger.error(f"Error updating static configuration: {e}")
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500

View File

@@ -0,0 +1,60 @@
import os
import logging
from flask import Blueprint, current_app, send_from_directory, abort
from werkzeug.utils import secure_filename
# Module-level logger for the data-serving blueprint
logger = logging.getLogger(__name__)

# Blueprint exposing files from the data folder under /data/<folder>/<file>
data_page = Blueprint('data', __name__, url_prefix='/data')
@data_page.route('/<path:folder>/<filename>')
def serve_data_file(folder: str, filename: str):
    """
    Serve files from the data folder (images, PDFs, etc.)

    This replaces serving these files from static/ folder.

    Security:
    - Only allows serving files from configured data folders
    - Uses secure_filename to prevent path traversal
    - Verifies the resolved path stays inside the allowed folder
    - Returns 404 if file doesn't exist or folder not allowed
    """
    # Secure the filename to prevent path traversal attacks
    safe_filename = secure_filename(filename)
    # Get the configured data folders
    allowed_folders = {
        'sets': current_app.config.get('SETS_FOLDER', './data/sets'),
        'parts': current_app.config.get('PARTS_FOLDER', './data/parts'),
        'minifigures': current_app.config.get('MINIFIGURES_FOLDER', './data/minifigures'),
        'instructions': current_app.config.get('INSTRUCTIONS_FOLDER', './data/instructions'),
    }
    # Check if the requested folder is allowed
    if folder not in allowed_folders:
        logger.warning(f"Attempt to access unauthorized folder: {folder}")
        abort(404)
    # Get the actual folder path
    folder_path = allowed_folders[folder]
    # If folder_path is relative (not absolute), make it relative to app root
    if not os.path.isabs(folder_path):
        folder_path = os.path.join(current_app.root_path, folder_path)
    # Check if file exists
    file_path = os.path.join(folder_path, safe_filename)
    if not os.path.isfile(file_path):
        logger.debug(f"File not found: {file_path}")
        abort(404)
    # Verify the resolved path is still within the allowed folder.
    # A bare startswith() prefix check would also accept sibling directories
    # (e.g. '/data/sets_evil' matches the prefix '/data/sets'), so the prefix
    # is anchored with a trailing path separator.
    allowed_base = os.path.abspath(folder_path)
    if not os.path.abspath(file_path).startswith(allowed_base + os.sep):
        logger.warning(f"Path traversal attempt detected: {folder}/{filename}")
        abort(404)
    return send_from_directory(folder_path, safe_filename)

View File

@@ -14,6 +14,7 @@ from .exceptions import exception_handler
from ..instructions import BrickInstructions from ..instructions import BrickInstructions
from ..instructions_list import BrickInstructionsList from ..instructions_list import BrickInstructionsList
from ..parser import parse_set from ..parser import parse_set
from ..peeron_instructions import PeeronInstructions
from ..socket import MESSAGES from ..socket import MESSAGES
from .upload import upload_helper from .upload import upload_helper
@@ -24,6 +25,22 @@ instructions_page = Blueprint(
) )
def _render_peeron_select_page(set: str) -> str:
    """Render the Peeron page-selection interface for a set.

    Looks the set up on Peeron (reusing cached thumbnails when available)
    and renders the selection template with the discovered pages.
    """
    pages = PeeronInstructions(set).find_pages()  # uses the cached thumbnails
    current_app.logger.debug(
        f"[peeron_loaded] Found {len(pages)} pages for {set}"
    )

    config = current_app.config
    return render_template(
        'peeron_select.html',
        download=True,
        pages=pages,
        set=set,
        path=config['SOCKET_PATH'],
        namespace=config['SOCKET_NAMESPACE'],
        messages=MESSAGES,
    )
# Index # Index
@instructions_page.route('/', methods=['GET']) @instructions_page.route('/', methods=['GET'])
@exception_handler(__file__) @exception_handler(__file__)
@@ -141,6 +158,10 @@ def download() -> str:
except Exception: except Exception:
set = '' set = ''
# Check if this is a redirect after Peeron pages were loaded
if request.args.get('peeron_loaded'):
return _render_peeron_select_page(set)
return render_template( return render_template(
'instructions.html', 'instructions.html',
download=True, download=True,
@@ -160,12 +181,50 @@ def do_download() -> str:
except Exception: except Exception:
set = '' set = ''
return render_template( # Check if this is a redirect after Peeron pages were loaded
'instructions.html', if request.args.get('peeron_loaded'):
download=True, return _render_peeron_select_page(set)
instructions=BrickInstructions.find_instructions(set),
set=set, # Try Rebrickable first
path=current_app.config['SOCKET_PATH'], try:
namespace=current_app.config['SOCKET_NAMESPACE'], from .instructions import BrickInstructions
messages=MESSAGES rebrickable_instructions = BrickInstructions.find_instructions(set)
) # Standard Rebrickable instructions found
return render_template(
'instructions.html',
download=True,
instructions=rebrickable_instructions,
set=set,
path=current_app.config['SOCKET_PATH'],
namespace=current_app.config['SOCKET_NAMESPACE'],
messages=MESSAGES
)
except Exception:
# Rebrickable failed, check if Peeron has instructions (without caching thumbnails yet)
try:
peeron = PeeronInstructions(set)
# Just check if pages exist, don't cache thumbnails yet
if peeron.exists():
# Peeron has instructions - show loading interface
return render_template(
'peeron_select.html',
download=True,
loading_peeron=True, # Flag to show loading state
set=set,
path=current_app.config['SOCKET_PATH'],
namespace=current_app.config['SOCKET_NAMESPACE'],
messages=MESSAGES
)
else:
raise Exception("Not found on Peeron either")
except Exception:
return render_template(
'instructions.html',
download=True,
instructions=[],
set=set,
error='No instructions found on Rebrickable or Peeron',
path=current_app.config['SOCKET_PATH'],
namespace=current_app.config['SOCKET_NAMESPACE'],
messages=MESSAGES
)

View File

@@ -1,8 +1,9 @@
from flask import Blueprint, render_template, request from flask import Blueprint, current_app, render_template, request
from .exceptions import exception_handler from .exceptions import exception_handler
from ..minifigure import BrickMinifigure from ..minifigure import BrickMinifigure
from ..minifigure_list import BrickMinifigureList from ..minifigure_list import BrickMinifigureList
from ..pagination_helper import get_pagination_config, build_pagination_context, get_request_params
from ..set_list import BrickSetList, set_metadata_lists from ..set_list import BrickSetList, set_metadata_lists
from ..set_owner_list import BrickSetOwnerList from ..set_owner_list import BrickSetOwnerList
@@ -13,24 +14,84 @@ minifigure_page = Blueprint('minifigure', __name__, url_prefix='/minifigures')
@minifigure_page.route('/', methods=['GET']) @minifigure_page.route('/', methods=['GET'])
@exception_handler(__file__) @exception_handler(__file__)
def list() -> str: def list() -> str:
# Get owner filter from request # Get filter parameters from request
owner_id = request.args.get('owner', 'all') owner_id = request.args.get('owner', 'all')
problems_filter = request.args.get('problems', 'all')
theme_id = request.args.get('theme', 'all')
year = request.args.get('year', 'all')
search_query, sort_field, sort_order, page = get_request_params()
# Get minifigures filtered by owner # Get pagination configuration
if owner_id == 'all' or owner_id is None or owner_id == '': per_page, is_mobile = get_pagination_config('minifigures')
minifigures = BrickMinifigureList().all() use_pagination = per_page > 0
if use_pagination:
# PAGINATION MODE - Server-side pagination with search
minifigures, total_count = BrickMinifigureList().all_filtered_paginated(
owner_id=owner_id,
problems_filter=problems_filter,
theme_id=theme_id,
year=year,
search_query=search_query,
page=page,
per_page=per_page,
sort_field=sort_field,
sort_order=sort_order
)
pagination_context = build_pagination_context(page, per_page, total_count, is_mobile)
else: else:
minifigures = BrickMinifigureList().all_by_owner(owner_id) # ORIGINAL MODE - Single page with all data for client-side search
if owner_id == 'all' or owner_id is None or owner_id == '':
minifigures = BrickMinifigureList().all_filtered(problems_filter=problems_filter, theme_id=theme_id, year=year)
else:
minifigures = BrickMinifigureList().all_by_owner_filtered(owner_id=owner_id, problems_filter=problems_filter, theme_id=theme_id, year=year)
pagination_context = None
# Get list of owners for filter dropdown # Get list of owners for filter dropdown
owners = BrickSetOwnerList.list() owners = BrickSetOwnerList.list()
return render_template( # Prepare context for dependent filters
'minifigures.html', filter_context = {}
table_collection=minifigures, if owner_id != 'all' and owner_id:
owners=owners, filter_context['owner_id'] = owner_id
selected_owner=owner_id,
) # Get list of themes for filter dropdown
from ..theme_list import BrickThemeList
from ..sql import BrickSQL
theme_list = BrickThemeList()
themes_data = BrickSQL().fetchall('minifigure/themes/list', **filter_context)
themes = []
for theme_data in themes_data:
theme = theme_list.get(theme_data['theme_id'])
themes.append({
'theme_id': theme_data['theme_id'],
'theme_name': theme.name if theme else f"Theme {theme_data['theme_id']}"
})
# Get list of years for filter dropdown
years = BrickSQL().fetchall('minifigure/years/list', **filter_context)
template_context = {
'table_collection': minifigures,
'owners': owners,
'selected_owner': owner_id,
'selected_problems': problems_filter,
'themes': themes,
'selected_theme': theme_id,
'years': years,
'selected_year': year,
'search_query': search_query,
'use_pagination': use_pagination,
'current_sort': sort_field,
'current_order': sort_order
}
if pagination_context:
template_context['pagination'] = pagination_context
return render_template('minifigures.html', **template_context)
# Minifigure details # Minifigure details

View File

@@ -2,6 +2,7 @@ from flask import Blueprint, render_template, request
from .exceptions import exception_handler from .exceptions import exception_handler
from ..minifigure_list import BrickMinifigureList from ..minifigure_list import BrickMinifigureList
from ..pagination_helper import get_pagination_config, build_pagination_context, get_request_params
from ..part import BrickPart from ..part import BrickPart
from ..part_list import BrickPartList from ..part_list import BrickPartList
from ..set_list import BrickSetList, set_metadata_lists from ..set_list import BrickSetList, set_metadata_lists
@@ -15,42 +16,177 @@ part_page = Blueprint('part', __name__, url_prefix='/parts')
@part_page.route('/', methods=['GET']) @part_page.route('/', methods=['GET'])
@exception_handler(__file__) @exception_handler(__file__)
def list() -> str: def list() -> str:
# Get filter parameters from request # Get filter parameters from request
owner_id = request.args.get('owner', 'all') owner_id = request.args.get('owner', 'all')
color_id = request.args.get('color', 'all') color_id = request.args.get('color', 'all')
theme_id = request.args.get('theme', 'all')
year = request.args.get('year', 'all')
search_query, sort_field, sort_order, page = get_request_params()
# Get parts with filters applied # Get pagination configuration
parts = BrickPartList().all_filtered(owner_id, color_id) per_page, is_mobile = get_pagination_config('parts')
use_pagination = per_page > 0
if use_pagination:
# PAGINATION MODE - Server-side pagination with search
parts, total_count = BrickPartList().all_filtered_paginated(
owner_id=owner_id,
color_id=color_id,
theme_id=theme_id,
year=year,
search_query=search_query,
page=page,
per_page=per_page,
sort_field=sort_field,
sort_order=sort_order
)
pagination_context = build_pagination_context(page, per_page, total_count, is_mobile)
else:
# ORIGINAL MODE - Single page with all data for client-side search
parts = BrickPartList().all_filtered(owner_id, color_id, theme_id, year)
pagination_context = None
# Get list of owners for filter dropdown # Get list of owners for filter dropdown
owners = BrickSetOwnerList.list() owners = BrickSetOwnerList.list()
# Get list of colors for filter dropdown # Prepare context for dependent filters
# Prepare context for color query (filter by owner if selected) filter_context = {}
color_context = {}
if owner_id != 'all' and owner_id: if owner_id != 'all' and owner_id:
color_context['owner_id'] = owner_id filter_context['owner_id'] = owner_id
colors = BrickSQL().fetchall('part/colors/list', **color_context) # Get list of colors for filter dropdown
colors = BrickSQL().fetchall('part/colors/list', **filter_context)
# Get list of themes for filter dropdown
from ..theme_list import BrickThemeList
theme_list = BrickThemeList()
themes_data = BrickSQL().fetchall('part/themes/list', **filter_context)
themes = []
for theme_data in themes_data:
theme = theme_list.get(theme_data['theme_id'])
themes.append({
'theme_id': theme_data['theme_id'],
'theme_name': theme.name if theme else f"Theme {theme_data['theme_id']}"
})
# Get list of years for filter dropdown
years = BrickSQL().fetchall('part/years/list', **filter_context)
template_context = {
'table_collection': parts,
'owners': owners,
'selected_owner': owner_id,
'colors': colors,
'selected_color': color_id,
'themes': themes,
'selected_theme': theme_id,
'years': years,
'selected_year': year,
'search_query': search_query,
'use_pagination': use_pagination,
'current_sort': sort_field,
'current_order': sort_order
}
if pagination_context:
template_context['pagination'] = pagination_context
return render_template('parts.html', **template_context)
return render_template(
'parts.html',
table_collection=parts,
owners=owners,
selected_owner=owner_id,
colors=colors,
selected_color=color_id,
)
# Problem # Problem
@part_page.route('/problem', methods=['GET']) @part_page.route('/problem', methods=['GET'])
@exception_handler(__file__) @exception_handler(__file__)
def problem() -> str: def problem() -> str:
# Get filter parameters from request
owner_id = request.args.get('owner', 'all')
color_id = request.args.get('color', 'all')
theme_id = request.args.get('theme', 'all')
year = request.args.get('year', 'all')
storage_id = request.args.get('storage', 'all')
tag_id = request.args.get('tag', 'all')
search_query, sort_field, sort_order, page = get_request_params()
# Get pagination configuration
per_page, is_mobile = get_pagination_config('problems')
use_pagination = per_page > 0
if use_pagination:
# PAGINATION MODE - Server-side pagination with search and filters
parts, total_count = BrickPartList().problem_paginated(
owner_id=owner_id,
color_id=color_id,
theme_id=theme_id,
year=year,
storage_id=storage_id,
tag_id=tag_id,
search_query=search_query,
page=page,
per_page=per_page,
sort_field=sort_field,
sort_order=sort_order
)
pagination_context = build_pagination_context(page, per_page, total_count, is_mobile)
else:
# ORIGINAL MODE - Single page with all data for client-side search
parts = BrickPartList().problem_filtered(owner_id, color_id, theme_id, year, storage_id, tag_id)
pagination_context = None
# Get list of owners for filter dropdown
owners = BrickSetOwnerList.list()
# Prepare context for dependent filters
filter_context = {}
if owner_id != 'all' and owner_id:
filter_context['owner_id'] = owner_id
# Get list of colors for filter dropdown (problem parts only)
colors = BrickSQL().fetchall('part/colors/list_problem', **filter_context)
# Get list of themes for filter dropdown (problem parts only)
from ..theme_list import BrickThemeList
theme_list = BrickThemeList()
themes_data = BrickSQL().fetchall('part/themes/list_problem', **filter_context)
themes = []
for theme_data in themes_data:
theme = theme_list.get(theme_data['theme_id'])
themes.append({
'theme_id': theme_data['theme_id'],
'theme_name': theme.name if theme else f"Theme {theme_data['theme_id']}"
})
# Get list of years for filter dropdown (problem parts only)
years = BrickSQL().fetchall('part/years/list_problem', **filter_context)
# Get list of storages for filter dropdown (problem parts only)
storages = BrickSQL().fetchall('part/storages/list_problem', **filter_context)
# Get list of tags for filter dropdown (problem parts only)
tags = BrickSQL().fetchall('part/tags/list_problem', **filter_context)
return render_template( return render_template(
'problem.html', 'problem.html',
table_collection=BrickPartList().problem() table_collection=parts,
pagination=pagination_context,
search_query=search_query,
sort_field=sort_field,
sort_order=sort_order,
use_pagination=use_pagination,
owners=owners,
colors=colors,
selected_owner=owner_id,
selected_color=color_id,
themes=themes,
selected_theme=theme_id,
years=years,
selected_year=year,
storages=storages,
selected_storage=storage_id,
tags=tags,
selected_tag=tag_id
) )

View File

@@ -15,6 +15,7 @@ from werkzeug.wrappers.response import Response
from .exceptions import exception_handler from .exceptions import exception_handler
from ..exceptions import ErrorException from ..exceptions import ErrorException
from ..minifigure import BrickMinifigure from ..minifigure import BrickMinifigure
from ..pagination_helper import get_pagination_config, build_pagination_context, get_request_params
from ..part import BrickPart from ..part import BrickPart
from ..rebrickable_set import RebrickableSet from ..rebrickable_set import RebrickableSet
from ..set import BrickSet from ..set import BrickSet
@@ -35,12 +36,83 @@ set_page = Blueprint('set', __name__, url_prefix='/sets')
@set_page.route('/', methods=['GET']) @set_page.route('/', methods=['GET'])
@exception_handler(__file__) @exception_handler(__file__)
def list() -> str: def list() -> str:
return render_template( # Get filter parameters from request
'sets.html', search_query, sort_field, sort_order, page = get_request_params()
collection=BrickSetList().all(),
brickset_statuses=BrickSetStatusList.list(), # Get filter parameters
status_filter = request.args.get('status')
theme_filter = request.args.get('theme')
owner_filter = request.args.get('owner')
purchase_location_filter = request.args.get('purchase_location')
storage_filter = request.args.get('storage')
tag_filter = request.args.get('tag')
year_filter = request.args.get('year')
duplicate_filter = request.args.get('duplicate', '').lower() == 'true'
# Get pagination configuration
per_page, is_mobile = get_pagination_config('sets')
use_pagination = per_page > 0
if use_pagination:
# PAGINATION MODE - Server-side pagination with search and filters
sets, total_count = BrickSetList().all_filtered_paginated(
search_query=search_query,
page=page,
per_page=per_page,
sort_field=sort_field,
sort_order=sort_order,
status_filter=status_filter,
theme_filter=theme_filter,
owner_filter=owner_filter,
purchase_location_filter=purchase_location_filter,
storage_filter=storage_filter,
tag_filter=tag_filter,
year_filter=year_filter,
duplicate_filter=duplicate_filter,
use_consolidated=current_app.config['SETS_CONSOLIDATION']
)
pagination_context = build_pagination_context(page, per_page, total_count, is_mobile)
else:
# ORIGINAL MODE - Single page with all data for client-side search
if current_app.config['SETS_CONSOLIDATION']:
sets = BrickSetList().all_consolidated()
else:
sets = BrickSetList().all()
pagination_context = None
# Convert theme ID to theme name for dropdown display if needed
display_theme_filter = theme_filter
if theme_filter and theme_filter.isdigit():
# Theme filter is an ID, convert to name for dropdown
# Create a fresh BrickSetList instance for theme conversion
converter = BrickSetList()
theme_name = converter._theme_id_to_name(theme_filter)
if theme_name:
display_theme_filter = theme_name
template_context = {
'collection': sets,
'search_query': search_query,
'use_pagination': use_pagination,
'current_sort': sort_field,
'current_order': sort_order,
'current_status_filter': status_filter,
'current_theme_filter': display_theme_filter,
'current_owner_filter': owner_filter,
'current_purchase_location_filter': purchase_location_filter,
'current_storage_filter': storage_filter,
'current_tag_filter': tag_filter,
'current_year_filter': year_filter,
'current_duplicate_filter': duplicate_filter,
'brickset_statuses': BrickSetStatusList.list(),
**set_metadata_lists(as_class=True) **set_metadata_lists(as_class=True)
) }
if pagination_context:
template_context['pagination'] = pagination_context
return render_template('sets.html', **template_context)
# Change the value of purchase date # Change the value of purchase date
@@ -187,13 +259,44 @@ def deleted(*, id: str) -> str:
@set_page.route('/<id>/details', methods=['GET']) @set_page.route('/<id>/details', methods=['GET'])
@exception_handler(__file__) @exception_handler(__file__)
def details(*, id: str) -> str: def details(*, id: str) -> str:
return render_template( # Load the specific set
'set.html', item = BrickSet().select_specific(id)
item=BrickSet().select_specific(id),
open_instructions=request.args.get('open_instructions'), # Check if there are multiple instances of this set
brickset_statuses=BrickSetStatusList.list(all=True), all_instances = BrickSetList()
**set_metadata_lists(as_class=True) # Load all sets with metadata context for tags, owners, etc.
) filter_context = {
'owners': BrickSetOwnerList.as_columns(),
'statuses': BrickSetStatusList.as_columns(),
'tags': BrickSetTagList.as_columns(),
}
all_instances.list(do_theme=True, **filter_context)
# Find all instances with the same set number
same_set_instances = [
record for record in all_instances.records
if record.fields.set == item.fields.set
]
# If consolidation is enabled and multiple instances exist, show consolidated view
if current_app.config['SETS_CONSOLIDATION'] and len(same_set_instances) > 1:
return render_template(
'set.html',
item=item,
all_instances=same_set_instances,
open_instructions=request.args.get('open_instructions'),
brickset_statuses=BrickSetStatusList.list(all=True),
**set_metadata_lists(as_class=True)
)
else:
# Single instance or consolidation disabled, show normal view
return render_template(
'set.html',
item=item,
open_instructions=request.args.get('open_instructions'),
brickset_statuses=BrickSetStatusList.list(all=True),
**set_metadata_lists(as_class=True)
)
# Update problematic pieces of a set # Update problematic pieces of a set
@@ -242,6 +345,50 @@ def problem_part(
return jsonify({problem: amount}) return jsonify({problem: amount})
# Update checked state of parts during walkthrough
@set_page.route('/<id>/parts/<part>/<int:color>/<int:spare>/checked', defaults={'figure': None}, methods=['POST'])  # noqa: E501
@set_page.route('/<id>/minifigures/<figure>/parts/<part>/<int:color>/<int:spare>/checked', methods=['POST'])  # noqa: E501
@login_required
@exception_handler(__file__, json=True)
def checked_part(
    *,
    id: str,
    figure: str | None,
    part: str,
    color: int,
    spare: int,
) -> Response:
    """Persist the checked state of a part (optionally belonging to one of
    the set's minifigures) during a walkthrough and return it as JSON."""
    brickset = BrickSet().select_specific(id)

    # Resolve the owning minifigure only when the minifigure route matched
    minifigure = (
        BrickMinifigure().select_specific(brickset, figure)
        if figure is not None
        else None
    )

    brickpart = BrickPart().select_specific(
        brickset,
        part,
        color,
        spare,
        minifigure=minifigure,
    )

    checked = brickpart.update_checked(request.json)

    # Info
    logger.info('Set {set} ({id}): updated part ({part} color: {color}, spare: {spare}, minifigure: {figure}) checked state to {checked}'.format(  # noqa: E501
        set=brickset.fields.set,
        id=brickset.fields.id,
        figure=figure,
        part=brickpart.fields.part,
        color=brickpart.fields.color,
        spare=brickpart.fields.spare,
        checked=checked
    ))

    return jsonify({'checked': checked})
# Refresh a set # Refresh a set
@set_page.route('/refresh/<set>/', methods=['GET']) @set_page.route('/refresh/<set>/', methods=['GET'])
@set_page.route('/<id>/refresh', methods=['GET']) @set_page.route('/<id>/refresh', methods=['GET'])

View File

@@ -0,0 +1,194 @@
"""
Statistics views for BrickTracker
Provides statistics and analytics pages
"""
import logging
from flask import Blueprint, render_template, request, url_for, redirect, current_app
from flask_login import login_required
from werkzeug.wrappers.response import Response
from .exceptions import exception_handler
from ..statistics import BrickStatistics
logger = logging.getLogger(__name__)
statistics_page = Blueprint('statistics', __name__, url_prefix='/statistics')
@statistics_page.route('/', methods=['GET'])
@login_required
@exception_handler(__file__)
def overview() -> str:
    """Statistics overview page with metrics.

    When ``filter_type``/``filter_value`` query parameters are present, the
    request is a click-through from a statistic and is redirected to the
    sets page. The redirect is issued *before* computing any statistics so
    no database work is wasted on a request that never renders.
    """
    # Get filter parameters for clickable statistics and redirect early
    filter_type = request.args.get('filter_type')
    filter_value = request.args.get('filter_value')
    if filter_type and filter_value:
        return redirect_to_filtered_sets(filter_type, filter_value)

    stats = BrickStatistics()

    # Get all statistics data
    overview_stats = stats.get_overview()
    theme_stats = stats.get_theme_statistics()
    storage_stats = stats.get_storage_statistics()
    purchase_location_stats = stats.get_purchase_location_statistics()
    financial_summary = stats.get_financial_summary()
    collection_summary = stats.get_collection_summary()
    sets_by_year_stats = stats.get_sets_by_year_statistics()
    purchases_by_year_stats = stats.get_purchases_by_year_statistics()
    year_summary = stats.get_year_summary()

    # Prepare chart data for visualization (only if charts are enabled)
    chart_data = {}
    if current_app.config['STATISTICS_SHOW_CHARTS']:
        chart_data = prepare_chart_data(sets_by_year_stats, purchases_by_year_stats)

    return render_template(
        'statistics.html',
        overview=overview_stats,
        theme_statistics=theme_stats,
        storage_statistics=storage_stats,
        purchase_location_statistics=purchase_location_stats,
        financial_summary=financial_summary,
        collection_summary=collection_summary,
        sets_by_year_statistics=sets_by_year_stats,
        purchases_by_year_statistics=purchases_by_year_stats,
        year_summary=year_summary,
        chart_data=chart_data,
        title="Statistics Overview"
    )
def redirect_to_filtered_sets(filter_type: str, filter_value: str) -> Response:
    """Redirect to the sets page with filters matching a statistics click."""
    # Map each clickable statistic onto the corresponding sets-page query
    filter_mapping = {
        'theme': {'theme': filter_value},
        'storage': {'storage': filter_value},
        'purchase_location': {'purchase_location': filter_value},
        'has_price': {'has_price': '1'} if filter_value == '1' else {},
        'missing_parts': {'status': 'has-missing'},
        'damaged_parts': {'status': 'has-damaged'},
        'has_storage': {'status': 'has-storage'},
        'no_storage': {'status': '-has-storage'},
    }

    # Unknown filter types (or empty mappings) fall back to the plain
    # sets page, since url_for with no extra kwargs is the default URL.
    params = filter_mapping.get(filter_type) or {}
    return redirect(url_for('set.list', **params))
@statistics_page.route('/themes', methods=['GET'])
@login_required
@exception_handler(__file__)
def themes() -> str:
    """Render the detailed per-theme statistics page."""
    return render_template(
        'statistics_themes.html',
        theme_statistics=BrickStatistics().get_theme_statistics(),
        title="Theme Statistics"
    )
@statistics_page.route('/storage', methods=['GET'])
@login_required
@exception_handler(__file__)
def storage() -> str:
    """Render the detailed per-storage statistics page."""
    return render_template(
        'statistics_storage.html',
        storage_statistics=BrickStatistics().get_storage_statistics(),
        title="Storage Statistics"
    )
@statistics_page.route('/purchase-locations', methods=['GET'])
@login_required
@exception_handler(__file__)
def purchase_locations() -> str:
    """Render the detailed purchase-location statistics page."""
    return render_template(
        'statistics_purchase_locations.html',
        purchase_location_statistics=BrickStatistics().get_purchase_location_statistics(),
        title="Purchase Location Statistics"
    )
def prepare_chart_data(sets_by_year_stats, purchases_by_year_stats):
    """Prepare data for Chart.js visualization.

    Args:
        sets_by_year_stats: rows with a 'year' key plus 'total_sets',
            'total_parts' and 'total_minifigures' counters.
        purchases_by_year_stats: rows with a 'purchase_year' key.

    Returns:
        dict with 'years', 'sets_data', 'parts_data' and 'minifigs_data',
        each JSON-encoded for direct embedding in the template.

    Years are normalized to int on BOTH sides: previously only purchase
    years were converted, so a string 'year' value (or a None year) mixed
    with int purchase years made sorted() raise TypeError.
    """
    import json

    all_years = set()

    # Build the per-year lookup while collecting years, keyed by the
    # normalized int year so string/int rows resolve to the same entry.
    sets_by_year_lookup = {}
    if sets_by_year_stats:
        for year_stat in sets_by_year_stats:
            if year_stat.get('year') is not None:
                year = int(year_stat['year'])
                all_years.add(year)
                sets_by_year_lookup[year] = year_stat

    # Add years from purchases by year
    if purchases_by_year_stats:
        for year_stat in purchases_by_year_stats:
            if year_stat.get('purchase_year') is not None:
                all_years.add(int(year_stat['purchase_year']))

    # Create sorted list of years
    years = sorted(all_years)

    # Fill data arrays; years with no sets data chart as zero
    sets_data = []
    parts_data = []
    minifigs_data = []
    for year in years:
        year_data = sets_by_year_lookup.get(year)
        if year_data:
            sets_data.append(year_data.get('total_sets') or 0)
            parts_data.append(year_data.get('total_parts') or 0)
            # Use actual minifigure count from the database
            minifigs_data.append(year_data.get('total_minifigures') or 0)
        else:
            sets_data.append(0)
            parts_data.append(0)
            minifigs_data.append(0)

    return {
        'years': json.dumps(years),
        'sets_data': json.dumps(sets_data),
        'parts_data': json.dumps(parts_data),
        'minifigs_data': json.dumps(minifigs_data)
    }

View File

@@ -19,17 +19,24 @@ class BrickWishList(BrickRecordList[BrickWish]):
# Queries # Queries
select_query: str = 'wish/list/all' select_query: str = 'wish/list/all'
# All the wished sets # Implementation of abstract list method
def all(self, /) -> Self: def list(self, /, *, override_query: str | None = None, **context) -> None:
# Use provided order or default
order = context.pop('order', current_app.config['WISHES_DEFAULT_ORDER'])
# Load the wished sets from the database # Load the wished sets from the database
for record in self.select( for record in self.select(
order=current_app.config['WISHES_DEFAULT_ORDER'], override_query=override_query,
order=order,
owners=BrickWishOwnerList.as_columns(), owners=BrickWishOwnerList.as_columns(),
**context
): ):
brickwish = BrickWish(record=record) brickwish = BrickWish(record=record)
self.records.append(brickwish) self.records.append(brickwish)
# All the wished sets
def all(self, /) -> Self:
self.list()
return self return self
# Add a set to the wishlist # Add a set to the wishlist

View File

@@ -2,21 +2,15 @@ services:
bricktracker: bricktracker:
container_name: BrickTracker container_name: BrickTracker
restart: unless-stopped restart: unless-stopped
# image: gitea.baerentsen.space/frederikbaerentsen/bricktracker:dev
build: . build: .
ports: ports:
- "3334:3333" - "3334:3333"
volumes: volumes:
- ./local:/local - ./local:/app/data # Changed from ./local to ./data for consistency
- ./local/instructions:/app/static/instructions/
- ./local/minifigures:/app/static/minifigures/
- ./local/parts:/app/static/parts/
- ./local/sets:/app/static/sets/
environment: environment:
BK_DEBUG: true - BK_DEBUG=true
BK_DATABASE_PATH: /local/app.db # For local development, place .env in data/ folder
BK_INSTRUCTIONS_FOLDER: instructions # The app automatically detects and uses data/.env (no env_file needed)
BK_MINIFIGURES_FOLDER: minifigures # Uncomment below only if you keep .env in root for backward compatibility
BK_PARTS_FOLDER: parts # env_file: .env
BK_RETIRED_SETS_PATH: /local/retired_sets.csv
BK_SETS_FOLDER: sets
BK_THEMES_PATH: /local/themes.csv

View File

@@ -2,26 +2,14 @@ services:
bricktracker: bricktracker:
container_name: BrickTracker container_name: BrickTracker
restart: unless-stopped restart: unless-stopped
image: gitea.baerentsen.space/frederikbaerentsen/bricktracker:1.2.2 image: gitea.baerentsen.space/frederikbaerentsen/bricktracker:latest
ports: ports:
- "3333:3333" - "3333:3333"
volumes: volumes:
- data:/data/ - ./data:/app/data/
- instructions:/app/static/instructions/ # Configuration can be done via .env file
- minifigures:/app/static/minifigures/ # For new installations, place .env in data/ folder for persistence
- parts:/app/static/parts/ # For backward compatibility, .env in root is also supported
- sets:/app/static/sets/ # The app automatically detects and uses data/.env (priority) or .env (fallback)
# Or define those in your .env file # env_file: ".env" # Optional: Only needed if keeping .env in root for backward compatibility
environment:
BK_DATABASE_PATH: /data/app.db
BK_MINIFIGURES_FOLDER: minifigures
BK_RETIRED_SETS_PATH: /data/retired_sets.csv
BK_THEMES_PATH: /data/themes.csv
env_file: ".env"
volumes:
data:
instructions:
minifigures:
parts:
sets:

View File

@@ -66,7 +66,7 @@
| Variable | Purpose | Default | Required | | Variable | Purpose | Default | Required |
|----------|---------|----------|-----------| |----------|---------|----------|-----------|
| `BK_INSTRUCTIONS_FOLDER` | Instructions storage path | `instructions` | No | | `BK_INSTRUCTIONS_FOLDER` | Instructions storage path | `instructions` | No |
| `BK_MINIFIGURES_FOLDER` | Minifigures storage path | `minifigs` | No | | `BK_MINIFIGURES_FOLDER` | Minifigures storage path | `minifigures` | No |
| `BK_PARTS_FOLDER` | Parts storage path | `parts` | No | | `BK_PARTS_FOLDER` | Parts storage path | `parts` | No |
| `BK_SETS_FOLDER` | Sets storage path | `sets` | No | | `BK_SETS_FOLDER` | Sets storage path | `sets` | No |
| `BK_INSTRUCTIONS_ALLOWED_EXTENSIONS` | Allowed instruction file types | `.pdf` | No | | `BK_INSTRUCTIONS_ALLOWED_EXTENSIONS` | Allowed instruction file types | `.pdf` | No |

BIN
docs/images/1-3-new-01.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 132 KiB

BIN
docs/images/1-3-new-02.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

BIN
docs/images/1-3-new-03.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 156 KiB

BIN
docs/images/1-3-new-04.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 161 KiB

BIN
docs/images/1-3-new-05.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 126 KiB

BIN
docs/images/1-3-new-06.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 349 KiB

BIN
docs/images/1-3-new-07.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 730 KiB

BIN
docs/images/1-3-new-08.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 539 KiB

BIN
docs/images/1-3-new-09.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 748 KiB

BIN
docs/images/1-3-new-10.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 159 KiB

BIN
docs/images/inst-dl-02.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 411 KiB

BIN
docs/images/inst-dl.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 121 KiB

353
docs/migration_guide.md Normal file
View File

@@ -0,0 +1,353 @@
# Data Folder Migration Guide (Docker Compose)
## Overview
Starting with version 1.3, BrickTracker consolidates all user data into a single `data/` folder for easier backup, persistence, and volume mapping.
**This guide assumes you are running BrickTracker using Docker Compose with bind mounts.**
> **Note:** If you're using Docker named volumes instead of bind mounts, you'll need to manually copy data between volumes. The commands below are specific to bind mount setups.
**Backup your data before making any changes!**
## What Changed?
### New Default Structure (v1.3+)
**All relative paths are resolved relative to `/app` inside the container.** Previously all paths were relative to `/app/static`.
For example: `data/app.db` → `/app/data/app.db`
```
Container (/app/):
├── data/ # NEW: Single volume mount for all user data
│ ├── .env # Configuration (recommended location)
│ ├── app.db # Database
│ ├── retired_sets.csv # Downloaded CSV files
│ ├── themes.csv
│ ├── sets/ # Set images
│ ├── parts/ # Part images
│ ├── minifigures/ # Minifigure images
│ └── instructions/ # PDF instructions
└── static/ # App assets
├── brick.png
├── styles.css
└── scripts/
```
**Docker Compose volume:** Single mount `./data:/app/data/`
### Previous Structure (v1.2 and earlier)
```
Container (/app/):
├── app.db # Mounted from ./data/ on host
├── retired_sets.csv # Mounted from ./data/ on host
├── themes.csv
└── static/
├── instructions/ # Separate bind mount
├── minifigs/ # Separate bind mount
├── parts/ # Separate bind mount
├── sets/ # Separate bind mount
```
**Docker Compose bind mounts:** 5 separate mounts
```yaml
volumes:
- ./data:/app/
- ./instructions:/app/static/instructions/
- ./minifigs:/app/static/minifigs/
- ./parts:/app/static/parts/
- ./sets:/app/static/sets/
```
## Migration Options
> **Warning**
> Backup your data before making any changes!
### Option 1: Migrate to New Data Folder Structure (Recommended)
This is the recommended approach for cleaner backups and simpler bind mount management.
1. **Stop the container:**
```bash
docker compose down
```
2. **Create new consolidated data directory on host:**
```bash
mkdir -p ./bricktracker-data/{sets,parts,minifigures,instructions}
```
3. **Move data from old bind mount locations to new structure:**
Assuming your old `compose.yaml` had:
- `./data:/app/` (contains app.db, retired_sets.csv, themes.csv)
- `./instructions:/app/static/instructions/`
- `./minifigs:/app/static/minifigs/`
- `./parts:/app/static/parts/`
- `./sets:/app/static/sets/`
> Default location for minifigures changed from `minifigs` to `minifigures`
Run:
```bash
# Move configuration file (optional but recommended)
mv .env ./bricktracker-data/.env
# Move database and CSV files
mv ./data/app.db ./bricktracker-data/
mv ./data/retired_sets.csv ./bricktracker-data/
mv ./data/themes.csv ./bricktracker-data/
# Move image and instruction folders
mv ./instructions/* ./bricktracker-data/instructions/
mv ./minifigs/* ./bricktracker-data/minifigures/
mv ./parts/* ./bricktracker-data/parts/
mv ./sets/* ./bricktracker-data/sets/
```
4. **Update `compose.yaml` to use single bind mount:**
```yaml
services:
bricktracker:
volumes:
- ./bricktracker-data:/app/data/
# Remove old volume mounts and env_file (if .env was moved to data/)
```
5. **Remove old path overrides from `.env` (if present):**
Delete any lines starting with:
- `BK_DATABASE_PATH=`
- `BK_INSTRUCTIONS_FOLDER=`
- `BK_MINIFIGURES_FOLDER=`
- `BK_PARTS_FOLDER=`
- `BK_SETS_FOLDER=`
- `BK_RETIRED_SETS_PATH=`
- `BK_THEMES_PATH=`
6. **Start the container:**
```bash
docker compose up -d
```
7. **Verify everything works:**
```bash
docker compose logs -f bricktracker
# Check the web interface to ensure images/data are loading
```
8. **Clean up old directories (after verification):**
```bash
rm -r ./data ./instructions ./minifigs ./parts ./sets
```
### Option 2: Keep Current Setup (No Data Migration)
If you want to keep your current volume structure without moving any files:
1. **Add these environment variables to your `.env` file:**
```env
# Keep database and CSV files in /data volume (old location)
BK_DATABASE_PATH=app.db
BK_RETIRED_SETS_PATH=retired_sets.csv
BK_THEMES_PATH=themes.csv
# Keep image/instruction folders in static/ (old location)
BK_INSTRUCTIONS_FOLDER=static/instructions
BK_MINIFIGURES_FOLDER=static/minifigs
BK_PARTS_FOLDER=static/parts
BK_SETS_FOLDER=static/sets
```
2. **Keep your existing volume mounts in `compose.yaml`:**
```yaml
volumes:
- ./data:/app/
- ./instructions:/app/static/instructions/
- ./minifigs:/app/static/minifigs/
- ./parts:/app/static/parts/
- ./sets:/app/static/sets/
```
3. **Update to v1.3 and restart:**
```bash
docker compose pull
docker compose up -d
```
That's it! Your existing setup will continue to work.
## Configuration File (.env) Location
### New Behavior (v1.3+)
BrickTracker now supports `.env` in two locations with automatic detection:
1. **data/.env** (recommended - new location)
- Included in data volume backup
- Settings persist when changed via admin panel
- Priority location (checked first)
- **No `env_file` needed** - app reads it directly from `/app/data/.env`
2. **.env** (backward compatibility - root)
- Continues to work for existing installations
- Requires `env_file: .env` in compose.yaml for Docker to load it at startup
- Not included in data volume (unless you add `.env` to `data/`)
### Migration Steps for .env
**Option A: Move to data folder (recommended)**
```bash
# Move .env to data folder
mv .env data/.env
# Update compose.yaml - remove or comment out env_file
# The app will automatically find and use /app/data/.env
```
**Option B: Keep in root (backward compatible)**
```bash
# No changes needed
# Keep env_file: .env in compose.yaml
# App will use .env from root as fallback
```
**Note:** The application automatically detects which location has the .env file at runtime. No Docker Compose `env_file` directive is needed for `data/.env` because the app reads it directly from the mounted volume.
## Configuration Reference
### New Default Paths (v1.3+)
All paths are relative to `/app` inside the container.
| Config Variable | Default Value | Resolves To (Container) | Description |
|----------------|---------------|------------------------|-------------|
| `BK_DATABASE_PATH` | `data/app.db` | `/app/data/app.db` | Database file |
| `BK_RETIRED_SETS_PATH` | `data/retired_sets.csv` | `/app/data/retired_sets.csv` | Retired sets CSV |
| `BK_THEMES_PATH` | `data/themes.csv` | `/app/data/themes.csv` | Themes CSV |
| `BK_INSTRUCTIONS_FOLDER` | `data/instructions` | `/app/data/instructions` | PDF instructions |
| `BK_MINIFIGURES_FOLDER` | `data/minifigures` | `/app/data/minifigures` | Minifigure images |
| `BK_PARTS_FOLDER` | `data/parts` | `/app/data/parts` | Part images |
| `BK_SETS_FOLDER` | `data/sets` | `/app/data/sets` | Set images |
**Docker Compose bind mount:** `./bricktracker-data:/app/data/` (single mount)
### Old Paths (v1.2 and earlier)
To preserve old volume structure without migration, add to `.env`:
| Config Variable | Value to Preserve Old Behavior | Resolves To (Container) |
|----------------|-------------------------------|------------------------|
| `BK_DATABASE_PATH` | `app.db` | `/app/app.db` |
| `BK_RETIRED_SETS_PATH` | `retired_sets.csv` | `/app/retired_sets.csv` |
| `BK_THEMES_PATH` | `themes.csv` | `/app/themes.csv` |
| `BK_INSTRUCTIONS_FOLDER` | `static/instructions` | `/app/static/instructions` |
| `BK_MINIFIGURES_FOLDER` | `static/minifigs` | `/app/static/minifigs` |
| `BK_PARTS_FOLDER` | `static/parts` | `/app/static/parts` |
| `BK_SETS_FOLDER` | `static/sets` | `/app/static/sets` |
## Benefits of New Structure
1. **Single Bind Mount**: One `./bricktracker-data:/app/data/` mount instead of five separate mounts
2. **Easier Backups**: All user data in one location - just backup the `bricktracker-data` directory
3. **Cleaner Separation**: User data separated from application assets
4. **Better Portability**: Migrate between systems by copying/moving single directory
## Troubleshooting
### Images/Instructions Not Loading After Migration
1. **Check if data was copied correctly:**
```bash
docker compose exec bricktracker ls -la /app/data/
docker compose exec bricktracker ls -la /app/data/sets/
docker compose exec bricktracker ls -la /app/data/instructions/
```
2. **Verify bind mount:**
```bash
docker compose config
# Should show: volumes: - ./bricktracker-data:/app/data/
```
3. **Check logs for path errors:**
```bash
docker compose logs -f
```
4. **Verify no old environment overrides:**
```bash
cat .env | grep BK_
```
### Database Not Found
1. **Check database file location in container:**
```bash
docker compose exec bricktracker ls -la /app/data/app.db
```
2. **If using old setup, verify environment variables:**
```bash
docker compose exec bricktracker env | grep BK_DATABASE_PATH
```
3. **Check host directory contains database:**
```bash
ls -la ./bricktracker-data/
# Should show: app.db, retired_sets.csv, themes.csv, and subdirectories
```
### Settings Don't Persist After Restart
**Error:** Admin panel changes revert after `docker compose restart`
**Solution:**
This happens when `.env` is not in a volume. Choose one:
**Option A: Move .env to data folder**
```bash
mv .env data/.env
# Update compose.yaml - remove or comment out env_file
# The app will automatically find and use /app/data/.env
```
**Option B: Mount .env as volume**
```yaml
volumes:
- ./.env:/app/.env
```
### Permission Errors
If you see permission errors after migration:
```bash
# Fix permissions on bind-mounted directory
sudo chown -R $(id -u):$(id -g) ./bricktracker-data
```
**Permission denied writing .env:**
If the admin panel shows an error when saving settings:
1. Ensure .env file is writable by container user
2. If using volume mount, check host file permissions
3. In container: `docker exec BrickTracker ls -la /app/.env` or `/app/data/.env`
### Reverting Migration
If you need to revert to the old structure:
1. Stop the container: `docker compose down`
2. Restore old `compose.yaml` with 5 volume mounts
3. Add old path environment variables to `.env` (see Option 1)
4. Start: `docker compose up -d`

View File

@@ -13,4 +13,4 @@ then
fi fi
# Execute the WSGI server # Execute the WSGI server
gunicorn --bind "${BK_HOST}:${BK_PORT}" "wsgi:application" --worker-class "gevent" --workers 1 "$@" exec gunicorn --bind "${BK_HOST}:${BK_PORT}" "wsgi:application" --worker-class "gevent" --workers 1 "$@"

View File

@@ -10,4 +10,5 @@ rebrick
requests requests
tzdata tzdata
bs4 bs4
cloudscraper fpdf2
pillow

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

After

Width:  |  Height:  |  Size: 65 KiB

BIN
static/brick2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

View File

@@ -0,0 +1,325 @@
// Admin Configuration Management
// Handles live environment variable configuration interface
// Initialize form values with current configuration
// Populate the admin form controls from window.CURRENT_CONFIG.
// Covers both "live" controls (element id == variable name, inputs without
// .config-static) and "static" controls (id prefixed "static-", inputs
// carrying .config-static).
function initializeConfigValues() {
    console.log('Initializing config values with:', window.CURRENT_CONFIG);
    for (const [varName, value] of Object.entries(window.CURRENT_CONFIG)) {
        console.log(`Setting ${varName} = ${value}`);
        // Live checkbox toggle keyed by the bare variable name.
        const liveToggle = document.getElementById(varName);
        if (liveToggle && liveToggle.type === 'checkbox') {
            liveToggle.checked = value === true;
            console.log(`Set checkbox ${varName} to ${value}`);
        }
        // Live text/number inputs bound via data-var.
        for (const input of document.querySelectorAll(`input[data-var="${varName}"]:not(.config-static)`)) {
            if (input.type !== 'checkbox') {
                input.value = value ?? '';
                console.log(`Set input ${varName} to ${input.value}`);
            }
        }
        // Static checkbox toggle keyed by "static-<name>".
        const staticToggle = document.getElementById(`static-${varName}`);
        if (staticToggle && staticToggle.type === 'checkbox') {
            staticToggle.checked = value === true;
            console.log(`Set static checkbox ${varName} to ${value}`);
        }
        // Static text/number inputs bound via data-var.
        for (const input of document.querySelectorAll(`input[data-var="${varName}"].config-static`)) {
            if (input.type !== 'checkbox') {
                input.value = value ?? '';
                console.log(`Set static input ${varName} to ${input.value}`);
            }
        }
    }
}
// Handle config change events
// Handle a change event on one configuration control: read its new value
// (boolean for checkboxes, integer for number inputs, string otherwise)
// and refresh the badges next to its label.
// Note: Changes are only saved when "Save All Changes" button is clicked.
function handleConfigChange(element) {
    const varName = element.dataset.var;
    let newValue;
    if (element.type === 'checkbox') {
        newValue = element.checked;
    } else if (element.type === 'number') {
        // Always pass an explicit radix; bare parseInt is error-prone.
        newValue = Number.parseInt(element.value, 10) || 0;
    } else {
        newValue = element.value;
    }
    // Update the badge display
    updateConfigBadge(varName, newValue);
}
// Update badge display
// Refresh the pills shown inline in a config field's label:
//   value === true  -> green "True" pill
//   value === false -> red "False" pill
//   anything else   -> neutral "Default: <default>" pill
// A yellow "Changed" pill is appended whenever value differs from the
// default. (The original had two byte-identical non-boolean branches for
// "equal to default" and "changed"; they are merged here.)
function updateConfigBadge(varName, value) {
    const defaultValue = window.DEFAULT_CONFIG[varName];
    // JSON comparison so arrays/objects compare by content, not identity.
    const isChanged = JSON.stringify(value) !== JSON.stringify(defaultValue);

    // Remove existing badges but keep them inline
    document.querySelectorAll(`[data-badge-var="${varName}"]`).forEach(badge => {
        badge.remove();
    });

    // Find the label where we should insert new badges
    const label = document.querySelector(`label[for="${varName}"], label[for="static-${varName}"]`);
    if (!label) return;

    // Find the description div (with .text-muted class) to insert badges before it
    const descriptionDiv = label.querySelector('.text-muted');

    // Build the value badge.
    const valueBadge = document.createElement('span');
    if (value === true) {
        valueBadge.className = 'badge rounded-pill text-bg-success ms-2';
        valueBadge.textContent = 'True';
    } else if (value === false) {
        valueBadge.className = 'badge rounded-pill text-bg-danger ms-2';
        valueBadge.textContent = 'False';
    } else {
        // Text/number fields always show "Default: X", changed or not.
        valueBadge.className = 'badge rounded-pill text-bg-light text-dark ms-2';
        valueBadge.textContent = `Default: ${defaultValue}`;
    }
    valueBadge.setAttribute('data-badge-var', varName);
    valueBadge.setAttribute('data-badge-type', 'value');

    // Insert badge before the description div (to keep it on same line as title)
    if (descriptionDiv) {
        label.insertBefore(valueBadge, descriptionDiv);
    } else {
        label.appendChild(valueBadge);
    }

    // Add changed badge if needed
    if (isChanged) {
        const changedBadge = document.createElement('span');
        changedBadge.className = 'badge rounded-pill text-bg-warning ms-1';
        changedBadge.textContent = 'Changed';
        changedBadge.setAttribute('data-badge-var', varName);
        changedBadge.setAttribute('data-badge-type', 'changed');
        // Insert changed badge after the value badge
        if (descriptionDiv) {
            label.insertBefore(changedBadge, descriptionDiv);
        } else {
            label.appendChild(changedBadge);
        }
    }
}
// Handle static config save
// Collect every static config control and POST the values to the backend,
// which persists them to the .env file. Outcome is reported in #config-status.
function saveStaticConfig() {
    const updates = {};
    for (const field of document.querySelectorAll('.config-static, .config-static-toggle')) {
        updates[field.dataset.var] = field.type === 'checkbox' ? field.checked : field.value;
    }
    console.log('Saving static config:', updates);
    // Send to backend via fetch API
    fetch('/admin/api/config/update-static', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({ updates: updates })
    })
        .then(response => response.json())
        .then(data => {
            const statusContainer = document.getElementById('config-status');
            if (!statusContainer) return;
            if (data.status === 'success') {
                statusContainer.innerHTML = '<div class="alert alert-success"><i class="ri-check-line"></i> Static configuration saved to .env file!</div>';
                // Auto-dismiss the success message.
                setTimeout(() => {
                    statusContainer.innerHTML = '';
                }, 3000);
            } else {
                statusContainer.innerHTML = `<div class="alert alert-danger"><i class="ri-error-warning-line"></i> Error: ${data.message || 'Failed to save static configuration'}</div>`;
            }
        })
        .catch(error => {
            console.error('Save static config error:', error);
            const statusContainer = document.getElementById('config-status');
            if (statusContainer) {
                statusContainer.innerHTML = '<div class="alert alert-danger"><i class="ri-error-warning-line"></i> Error: Failed to save static configuration</div>';
            }
        });
}
// Handle button functionality
// Wire up the toolbar buttons of the configuration page. Each lookup uses
// optional chaining so missing buttons are silently skipped.
function setupButtonHandlers() {
    // Save All Changes button
    document.getElementById('config-save-all')?.addEventListener('click', () => {
        console.log('Save All Changes clicked');
        saveLiveConfiguration();
    });
    // Refresh button
    document.getElementById('config-refresh')?.addEventListener('click', () => {
        console.log('Refresh clicked');
        location.reload();
    });
    // Reset button (destructive, so ask for confirmation first)
    document.getElementById('config-reset')?.addEventListener('click', () => {
        console.log('Reset clicked');
        if (confirm('Are you sure you want to reset all settings to default values? This action cannot be undone.')) {
            resetToDefaults();
        }
    });
    // Static config save button
    document.getElementById('config-save-static')?.addEventListener('click', saveStaticConfig);
}
// Save live configuration changes
// Collect every live config control, POST the values to the backend, and
// reload the page on success so the new configuration takes effect.
// Progress/outcome is reported in the #config-status container.
function saveLiveConfiguration() {
    const liveInputs = document.querySelectorAll('.config-toggle, .config-number, .config-text');
    const updates = {};
    liveInputs.forEach(input => {
        const varName = input.dataset.var;
        let value;
        if (input.type === 'checkbox') {
            value = input.checked;
        } else if (input.type === 'number') {
            // Always pass an explicit radix; bare parseInt is error-prone.
            value = Number.parseInt(input.value, 10) || 0;
        } else {
            value = input.value;
        }
        updates[varName] = value;
    });
    console.log('Saving live configuration:', updates);
    // Show status message
    const statusContainer = document.getElementById('config-status');
    if (statusContainer) {
        statusContainer.innerHTML = '<div class="alert alert-info"><i class="ri-loader-4-line"></i> Saving configuration...</div>';
    }
    // Send to backend via fetch API
    fetch('/admin/api/config/update', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({ updates: updates })
    })
    .then(response => response.json())
    .then(data => {
        if (statusContainer) {
            if (data.status === 'success') {
                statusContainer.innerHTML = '<div class="alert alert-success"><i class="ri-check-line"></i> Configuration saved successfully! Reloading page...</div>';
                // Reload the page after a short delay
                setTimeout(() => {
                    location.reload();
                }, 1000);
            } else {
                statusContainer.innerHTML = `<div class="alert alert-danger"><i class="ri-error-warning-line"></i> Error: ${data.message || 'Failed to save configuration'}</div>`;
            }
        }
    })
    .catch(error => {
        console.error('Save error:', error);
        if (statusContainer) {
            statusContainer.innerHTML = '<div class="alert alert-danger"><i class="ri-error-warning-line"></i> Error: Failed to save configuration</div>';
        }
    });
}
// Reset all settings to defaults
// Blank every live form control (unchecked / empty) and refresh the badges.
// Nothing is persisted until "Save All Changes" is clicked.
function resetToDefaults() {
    console.log('Resetting to defaults');
    // Reset all form inputs
    for (const input of document.querySelectorAll('.config-toggle, .config-number, .config-text')) {
        if (input.type === 'checkbox') {
            input.checked = false;
        } else {
            input.value = '';
        }
    }
    // Update badges
    for (const varName of Object.keys(window.CURRENT_CONFIG)) {
        updateConfigBadge(varName, null);
    }
    // Show status message
    const statusContainer = document.getElementById('config-status');
    if (statusContainer) {
        statusContainer.innerHTML = '<div class="alert alert-warning"><i class="ri-restart-line"></i> Settings reset to defaults. Click "Save All Changes" to apply.</div>';
    }
}
// Initialize when DOM is ready
// Bootstrap the configuration interface once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => {
    console.log('DOM loaded, initializing configuration interface');
    // Initialize form values
    initializeConfigValues();
    // Setup button handlers
    setupButtonHandlers();
    // Delegate change events from any element carrying a data-var attribute.
    document.addEventListener('change', (event) => {
        const { target } = event;
        if (target.matches('[data-var]')) {
            handleConfigChange(target);
        }
    });
    console.log('Configuration interface initialized - ready for API calls');
});

View File

@@ -0,0 +1,515 @@
/**
* Shared collapsible state management for filters and sort sections
* Handles BK_SHOW_GRID_FILTERS and BK_SHOW_GRID_SORT configuration with user preferences
*/
// Generic state management for collapsible sections (filter and sort)
// Restore and track the open/closed state of one collapsible section.
// The user's explicit choice is persisted in sessionStorage; with no saved
// state the template state (BK_SHOW_GRID_FILTERS/BK_SHOW_GRID_SORT) wins.
function initializeCollapsibleState(elementId, storageKey) {
    const section = document.getElementById(elementId);
    const toggler = document.querySelector(`[data-bs-target="#${elementId}"]`);
    if (!section || !toggler) return;

    // Restore state on page load.
    switch (sessionStorage.getItem(storageKey)) {
        case 'open':
            // User explicitly opened it
            section.classList.add('show');
            toggler.setAttribute('aria-expanded', 'true');
            break;
        case 'closed':
            // User explicitly closed it, override template state
            section.classList.remove('show');
            toggler.setAttribute('aria-expanded', 'false');
            break;
    }

    // Record the user's explicit toggles (Bootstrap collapse events).
    section.addEventListener('show.bs.collapse', () => {
        sessionStorage.setItem(storageKey, 'open');
    });
    section.addEventListener('hide.bs.collapse', () => {
        sessionStorage.setItem(storageKey, 'closed');
    });
}
// Initialize filter and sort states for a specific page
// Initialize the filter and sort collapsible sections for one page, then
// sync the sort-direction icon with the current URL parameters.
function initializePageCollapsibleStates(pagePrefix, filterElementId = 'table-filter', sortElementId = 'table-sort') {
    initializeCollapsibleState(filterElementId, `${pagePrefix}-filter-state`);
    initializeCollapsibleState(sortElementId, `${pagePrefix}-sort-state`);

    // Initialize sort icons based on current URL parameters (for all pages).
    const params = new URLSearchParams(window.location.search);
    const sort = params.get('sort');
    const order = params.get('order');
    if (sort || order) {
        updateSortIcon(order);
    }
}
// Shared function to preserve filter state during filter changes
// Remember that a collapsible section was open so it can be re-opened after
// a filter change triggers a page reload. (A closed section is left alone.)
function preserveCollapsibleStateOnChange(elementId, storageKey) {
    const section = document.getElementById(elementId);
    if (section && section.classList.contains('show')) {
        sessionStorage.setItem(storageKey, 'open');
    }
}
// Setup color dropdown with visual indicators (shared implementation)
// De-duplicate the #filter-color <select>: templates can emit two options
// for the same color id (one carrying data-color-rgb, one with "None"); the
// RGB-carrying option is kept and inherits the selected state of either.
function setupColorDropdown() {
    const colorSelect = document.getElementById('filter-color');
    if (!colorSelect) return;
    // Merge duplicate color options where one has color_rgb and the other is None
    const colorMap = new Map();
    const allOptions = colorSelect.querySelectorAll('option[data-color-id]');
    // First pass: collect all options by color_id
    allOptions.forEach(option => {
        const colorId = option.dataset.colorId;
        const colorRgb = option.dataset.colorRgb;
        const colorName = option.textContent.trim();
        if (!colorMap.has(colorId)) {
            colorMap.set(colorId, []);
        }
        colorMap.get(colorId).push({
            element: option,
            colorRgb: colorRgb,
            colorName: colorName,
            selected: option.selected
        });
    });
    // Second pass: merge duplicates, keeping the one with color_rgb
    colorMap.forEach((options, colorId) => {
        if (options.length > 1) {
            // Find option with color_rgb (not empty/null/undefined)
            const withRgb = options.find(opt => opt.colorRgb && opt.colorRgb !== 'None' && opt.colorRgb !== '');
            const withoutRgb = options.find(opt => !opt.colorRgb || opt.colorRgb === 'None' || opt.colorRgb === '');
            if (withRgb && withoutRgb) {
                // Keep the selected state from either option
                const wasSelected = withRgb.selected || withoutRgb.selected;
                // Update the option with RGB to be selected if either was selected
                if (wasSelected) {
                    withRgb.element.selected = true;
                }
                // Remove the option without RGB
                withoutRgb.element.remove();
            }
        }
    });
    // Add color squares to remaining option text
    const remainingOptions = colorSelect.querySelectorAll('option[data-color-rgb]');
    remainingOptions.forEach(option => {
        const colorRgb = option.dataset.colorRgb;
        const colorId = option.dataset.colorId;
        const colorName = option.textContent.trim();
        // colorId '9999' is skipped — presumably a sentinel "all/none" entry;
        // TODO(review): confirm against the template that renders this select.
        if (colorRgb && colorRgb !== 'None' && colorRgb !== '' && colorId !== '9999') {
            // Create a visual indicator (using Unicode square)
            // NOTE(review): the square and coloring are currently disabled;
            // this assignment rewrites the text with itself (effective no-op).
            option.textContent = `${colorName}`; //■
            //option.style.color = `#${colorRgb}`;
        }
    });
}
// Check if pagination mode is enabled for a specific table
// A table is in server-side pagination mode when its data-table attribute is
// the literal string 'false'.
function isPaginationModeForTable(tableId) {
    const node = document.querySelector(`#${tableId}`);
    if (!node) {
        // Preserve the original falsy return value (null) when absent.
        return node;
    }
    return node.getAttribute('data-table') === 'false';
}
// Update sort icon based on current sort direction
// Flip the main sort icon to ascending or descending. Any direction other
// than 'desc' (including null) shows the ascending icon.
function updateSortIcon(sortDirection = null) {
    // The icon may live in either the grid-sort or the table-sort section.
    const icon = document.querySelector('#grid-sort .ri-sort-asc, #grid-sort .ri-sort-desc, #table-sort .ri-sort-asc, #table-sort .ri-sort-desc');
    if (!icon) return;
    icon.classList.remove('ri-sort-asc', 'ri-sort-desc');
    icon.classList.add(sortDirection === 'desc' ? 'ri-sort-desc' : 'ri-sort-asc');
}
// Initialize sort button states and icons for pagination mode
// Reflect the current server-side sort (attribute + direction) on the sort
// buttons and the main sort icon when in pagination mode.
window.initializeSortButtonStates = function(currentSort, currentOrder) {
    const buttons = document.querySelectorAll('[data-sort-attribute]');
    // Update main sort icon
    updateSortIcon(currentOrder);
    if (!currentSort) return;
    for (const btn of buttons) {
        // Clear all buttons first
        btn.classList.remove('btn-primary');
        btn.classList.add('btn-outline-primary');
        btn.removeAttribute('data-current-direction');
        // Mark the button matching the active sort attribute
        if (btn.dataset.sortAttribute === currentSort) {
            btn.classList.remove('btn-outline-primary');
            btn.classList.add('btn-primary');
            btn.dataset.currentDirection = currentOrder || 'asc';
        }
    }
};
// Shared sort button setup function
// Wire every [data-sort-attribute] button (plus the optional clear button)
// for one of two modes, decided once per page load:
//  - pagination mode (table has data-table="false"): server-side sorting
//    driven by the sort/order/page URL parameters;
//  - original mode: client-side sorting through a Simple DataTables
//    instance stored at window[tableInstanceGlobal].
// columnMap maps a sort attribute name to the table column index.
window.setupSharedSortButtons = function(tableId, tableInstanceGlobal, columnMap) {
    const sortButtons = document.querySelectorAll('[data-sort-attribute]');
    const clearButton = document.querySelector('[data-sort-clear]');
    const isPaginationMode = isPaginationModeForTable(tableId);
    sortButtons.forEach(button => {
        button.addEventListener('click', () => {
            const attribute = button.dataset.sortAttribute;
            // data-sort-desc="true" means this attribute defaults to descending.
            const isDesc = button.dataset.sortDesc === 'true';
            if (isPaginationMode) {
                // PAGINATION MODE - Server-side sorting via URL parameters
                const currentUrl = new URL(window.location);
                const currentSort = currentUrl.searchParams.get('sort');
                const currentOrder = currentUrl.searchParams.get('order');
                const isCurrentlyActive = currentSort === attribute;
                let newDirection;
                if (isCurrentlyActive) {
                    // Toggle direction if same attribute
                    newDirection = currentOrder === 'asc' ? 'desc' : 'asc';
                } else {
                    // Use default direction for new attribute
                    newDirection = isDesc ? 'desc' : 'asc';
                }
                // Set sort parameters and reset to first page
                currentUrl.searchParams.set('sort', attribute);
                currentUrl.searchParams.set('order', newDirection);
                currentUrl.searchParams.set('page', '1');
                // Navigate to sorted results
                window.location.href = currentUrl.toString();
            } else {
                // ORIGINAL MODE - Client-side sorting via Simple DataTables
                const columnIndex = columnMap[attribute];
                const tableInstance = window[tableInstanceGlobal];
                if (columnIndex !== undefined && tableInstance) {
                    // Determine sort direction: toggle if this button is already
                    // active, otherwise start from its default direction.
                    const isCurrentlyActive = button.classList.contains('btn-primary');
                    const currentDirection = button.dataset.currentDirection || (isDesc ? 'desc' : 'asc');
                    const newDirection = isCurrentlyActive ?
                        (currentDirection === 'asc' ? 'desc' : 'asc') :
                        (isDesc ? 'desc' : 'asc');
                    // Clear other active buttons
                    sortButtons.forEach(btn => {
                        btn.classList.remove('btn-primary');
                        btn.classList.add('btn-outline-primary');
                        btn.removeAttribute('data-current-direction');
                    });
                    // Mark this button as active
                    button.classList.remove('btn-outline-primary');
                    button.classList.add('btn-primary');
                    button.dataset.currentDirection = newDirection;
                    // Apply sort using Simple DataTables API
                    tableInstance.table.columns.sort(columnIndex, newDirection);
                    // Update sort icon to reflect new direction
                    updateSortIcon(newDirection);
                }
            }
        });
    });
    if (clearButton) {
        clearButton.addEventListener('click', () => {
            if (isPaginationMode) {
                // PAGINATION MODE - Clear server-side sorting via URL parameters
                const currentUrl = new URL(window.location);
                currentUrl.searchParams.delete('sort');
                currentUrl.searchParams.delete('order');
                currentUrl.searchParams.set('page', '1');
                window.location.href = currentUrl.toString();
            } else {
                // ORIGINAL MODE - Clear client-side sorting
                // Clear all sort buttons
                sortButtons.forEach(btn => {
                    btn.classList.remove('btn-primary');
                    btn.classList.add('btn-outline-primary');
                    btn.removeAttribute('data-current-direction');
                });
                // Reset sort icon to default ascending
                updateSortIcon('asc');
                // Reset table sort - remove all sorting. Simple DataTables has
                // no "unsort", so the instance is destroyed and rebuilt.
                const tableInstance = window[tableInstanceGlobal];
                if (tableInstance) {
                    const tableElement = document.querySelector(`#${tableId}`);
                    const currentPerPage = tableInstance.table.options.perPage;
                    tableInstance.table.destroy();
                    // Delay so the destroy finishes rendering before rebuild;
                    // NOTE(review): 50 ms is empirical — confirm it suffices.
                    setTimeout(() => {
                        // Create new instance using the globally available BrickTable class
                        const newInstance = new window.BrickTable(tableElement, currentPerPage);
                        window[tableInstanceGlobal] = newInstance;
                        // Re-enable search functionality
                        newInstance.table.searchable = true;
                    }, 50);
                }
            }
        });
    }
};
// =================================================================
// SHARED FUNCTIONS FOR PAGE-SPECIFIC OPERATIONS
// =================================================================
// Shared pagination mode detection (works for any table/grid ID).
// Server-side pagination is signalled by the element carrying the given
// attribute with the literal string value "false".
window.isPaginationModeForPage = function(elementId, attributeName = 'data-table') {
    const target = document.querySelector(`#${elementId}`);
    if (!target) {
        return target; // null stays falsy, matching the original short-circuit
    }
    return target.getAttribute(attributeName) === 'false';
};
// Shared URL parameter update helper.
// Applies the given key/value pairs to the current URL's query string and
// navigates to the result. Empty-ish values (null, undefined, '', 'all')
// remove the parameter instead of setting it.
// @param {Object} params - query parameter names mapped to their new values
// @param {boolean} [resetPage=true] - when true, forces page=1 after the update
window.updateUrlParams = function(params, resetPage = true) {
    const url = new URL(window.location);
    for (const [name, val] of Object.entries(params)) {
        const shouldRemove = val === null || val === undefined || val === '' || val === 'all';
        if (shouldRemove) {
            url.searchParams.delete(name);
        } else {
            url.searchParams.set(name, val);
        }
    }
    // Jump back to the first page when requested (e.g. after a filter change)
    if (resetPage) {
        url.searchParams.set('page', '1');
    }
    // Navigate to updated URL
    window.location.href = url.toString();
};
// Shared filter application (supports owner, color, theme, year, storage, tag, and problems filters)
// Collects the current value of every filter <select> present on the page and
// pushes them into the URL via window.updateUrlParams.
// Each supported filter follows the same convention: a <select> with
// id "filter-<name>" maps to the URL query parameter "<name>".
// ('problems' only exists on the minifigures page; absent selects are skipped.)
// @param {string} tableId - id of the table/grid element, used for mode detection
window.applyPageFilters = function(tableId) {
    const filterNames = ['owner', 'color', 'theme', 'year', 'storage', 'tag', 'problems'];
    const params = {};
    filterNames.forEach((name) => {
        const select = document.getElementById(`filter-${name}`);
        if (select) {
            params[name] = select.value;
        }
    });
    // Check if we're in pagination mode
    const isPaginationMode = window.isPaginationModeForPage(tableId);
    // Update URL with new parameters.
    // Only reset to page 1 if in server-side pagination mode.
    window.updateUrlParams(params, isPaginationMode);
};
// Shared search setup for both pagination and client-side modes.
// In pagination mode, Enter submits the search server-side via URL params;
// in client-side mode, typing filters instantly through Simple DataTables.
window.setupPageSearch = function(tableId, searchInputId, clearButtonId, tableInstanceGlobal) {
    const input = document.getElementById(searchInputId);
    const clearBtn = document.getElementById(clearButtonId);
    if (!input || !clearBtn) return;
    if (window.isPaginationModeForPage(tableId)) {
        // PAGINATION MODE - Server-side search with Enter key
        input.addEventListener('keypress', (event) => {
            if (event.key !== 'Enter') return;
            event.preventDefault();
            window.updateUrlParams({ search: event.target.value.trim() }, true);
        });
        // Clear search
        clearBtn.addEventListener('click', () => {
            input.value = '';
            window.updateUrlParams({ search: null }, true);
        });
        return;
    }
    // ORIGINAL MODE - Client-side instant search via Simple DataTables
    const attachClientSearch = () => {
        const tableElement = document.querySelector(`table[data-table="true"]#${tableId}`);
        const instance = window[tableInstanceGlobal];
        if (!tableElement || !instance) {
            // Table instance not ready yet; poll again shortly
            setTimeout(attachClientSearch, 100);
            return;
        }
        // Enable search functionality
        instance.table.searchable = true;
        // Instant search as user types
        input.addEventListener('input', (event) => {
            instance.table.search(event.target.value.trim());
        });
        // Clear search
        clearBtn.addEventListener('click', () => {
            input.value = '';
            instance.table.search('');
        });
    };
    setTimeout(attachClientSearch, 100);
};
// Shared function to preserve filter state and apply filters.
// Remembers whether the collapsible filter panel was open so it can be
// restored after the filter-triggered page reload.
window.applyFiltersAndKeepState = function(tableId, storageKey) {
    const panel = document.getElementById('table-filter');
    const panelWasOpen = panel && panel.classList.contains('show');
    // Apply the filters (may navigate away in pagination mode)
    window.applyPageFilters(tableId);
    // Persist the open state so it survives the reload
    if (panelWasOpen) {
        sessionStorage.setItem(storageKey, 'open');
    }
};
// Shared function to clear all filters for a page (works in both pagination and client-side modes).
// @param {string} tableId - id of the table/grid element, used for mode detection
// @param {string[]} filterParams - filter names (e.g. ['owner', 'color']) to reset
window.clearPageFilters = function(tableId, filterParams) {
    if (window.isPaginationModeForPage(tableId)) {
        // SERVER-SIDE PAGINATION MODE: strip every filter parameter plus the
        // page parameter, then reload at the cleaned base URL.
        const url = new URL(window.location);
        for (const param of filterParams) {
            url.searchParams.delete(param);
        }
        url.searchParams.delete('page');
        window.location.href = url.toString();
        return;
    }
    // CLIENT-SIDE MODE: reset every filter dropdown back to "all"
    for (const param of filterParams) {
        const dropdown = document.getElementById(`filter-${param}`);
        if (dropdown) {
            dropdown.value = 'all';
        }
    }
    // Drop any stale page parameter from the URL without reloading
    const url = new URL(window.location);
    if (url.searchParams.has('page')) {
        url.searchParams.delete('page');
        window.history.replaceState({}, '', url.toString());
    }
    // Re-run the shared filter application logic
    window.applyPageFilters(tableId);
};
// Shared initialization for table pages (parts, problems, minifigures).
// Wires up collapsible panels, search, the optional color dropdown, sort
// buttons, and — in pagination mode — the initial sort-button state from the URL.
// @param {Object} config - page configuration:
//   pagePrefix:          e.g. 'parts', 'problems', 'minifigures'
//   tableId:             id of the table element
//   searchInputId:       id of the search input (default 'table-search')
//   clearButtonId:       id of the search-clear button (default 'table-search-clear')
//   tableInstanceGlobal: global variable name holding the table instance
//   sortColumnMap:       column mapping for sort buttons (optional)
//   hasColorDropdown:    whether to set up the color dropdown (default true)
window.initializeTablePage = function(config) {
    const {
        pagePrefix,
        tableId,
        searchInputId = 'table-search',
        clearButtonId = 'table-search-clear',
        tableInstanceGlobal,
        sortColumnMap,
        hasColorDropdown = true
    } = config;
    // Restore collapsible states (filter and sort panels)
    initializePageCollapsibleStates(pagePrefix);
    // Wire up search for whichever mode the page is in
    window.setupPageSearch(tableId, searchInputId, clearButtonId, tableInstanceGlobal);
    if (hasColorDropdown) {
        setupColorDropdown();
    }
    // Attach shared sort-button behavior when a column map is provided
    if (sortColumnMap) {
        window.setupSharedSortButtons(tableId, tableInstanceGlobal, sortColumnMap);
    }
    // In pagination mode, reflect the current URL sort state on the buttons
    if (window.isPaginationModeForPage(tableId)) {
        const query = new URLSearchParams(window.location.search);
        window.initializeSortButtonStates(query.get('sort'), query.get('order'));
    }
};
