Compare commits
272 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 21d104280c | |||
| 5946f86dfa | |||
| 0155144881 | |||
| 1353153394 | |||
| 0f45192f8e | |||
| b02f851865 | |||
| bddfbb5235 | |||
| dc34916331 | |||
| a8d36bc5f1 | |||
| bd32ca5b8f | |||
| 2ed60e3fe3 | |||
| 0ec1d37c36 | |||
| 8053f5d30c | |||
| 7eb199d289 | |||
| 6364da676b | |||
| a3d08d8cf6 | |||
| 4b653ac270 | |||
| a70a1660f0 | |||
| 0db749fce0 | |||
| 256108bbdb | |||
| 145d9d5dcb | |||
| b9d42c2866 | |||
| d1988d015e | |||
| 8e458b01d1 | |||
| 989e0d57d0 | |||
| 1097255dca | |||
| 7ffbc41f0a | |||
| 11f9e5782f | |||
| 5f43e979f9 | |||
| 4375f018a4 | |||
| 87472039be | |||
| c1089c349f | |||
| 3f6af51a43 | |||
| bc3cc176ef | |||
| 4a1a265fa8 | |||
| 7c95583345 | |||
| 65f23c1f12 | |||
| aa6c969a6b | |||
| 0bff20215c | |||
| d0147b8061 | |||
| ca0de215ab | |||
| 05b259e494 | |||
| f03fd82be1 | |||
| a769e5464b | |||
| 40871a1c10 | |||
| caac283905 | |||
| 4bc0ef5cc4 | |||
| ec4f44a3ab | |||
| 0a29543939 | |||
| 74fe14f09b | |||
| 787624c432 | |||
| eddf4311d3 | |||
| 90c0c20d75 | |||
| d2d388b142 | |||
| acf06e1955 | |||
| c465e9814c | |||
| 046493294f | |||
| 1096fbdef6 | |||
| fc405e0832 | |||
| cce96af09b | |||
| f953a44593 | |||
| e87cb90e20 | |||
| f3fada9dd8 | |||
| 4eae6b19dc | |||
| 064b79bf9e | |||
| 7c1cb66f67 | |||
| 5641b3e740 | |||
| 9317a1baae | |||
| 6f6d90aa60 | |||
| 83a45795c3 | |||
| 572c52dada | |||
| 909655c10a | |||
| d1b79de411 | |||
| 1e767537b9 | |||
| 8ee0d144be | |||
| f7963b4723 | |||
| 52b6c94483 | |||
| b5236fae51 | |||
| 9d0a48ee2a | |||
| 5677d731e4 | |||
| fcdcd12184 | |||
| e1891e8bd6 | |||
| af53b29818 | |||
| 8a0a7837dc | |||
| 4b3aef577a | |||
| 9a32a3f193 | |||
| c71667cd41 | |||
| 421d635dd3 | |||
| 6bc406b70d | |||
| 5fa145a9d7 | |||
| 3bfd1c17dd | |||
| 46dada312a | |||
| c876e1e3a4 | |||
| 787a376553 | |||
| 7f4be9da36 | |||
| d6f69bca9d | |||
| 3adeef086b | |||
| 40b63fff6a | |||
| 1cac17a420 | |||
| 7bfbbbf298 | |||
| 79f348178c | |||
| 07be7b6004 | |||
| cb24cfc014 | |||
| 418bd5cd9d | |||
| 9953e3921a | |||
| 2d0fa7bf89 | |||
| c6b9f1c61a | |||
| e28ad8b32c | |||
| 6d70dbdf8b | |||
| 1dee03fbea | |||
| bb05fbdd22 | |||
| 3496143962 | |||
| fa3a321b9e | |||
| cf4c575a4f | |||
| a99669d9dc | |||
| b6d69e0f10 | |||
| 6eb0964322 | |||
| 64a9e063ec | |||
| ad24506ab7 | |||
| 56c1a46b37 | |||
| 5d6b373769 | |||
| 9e709039c5 | |||
| e022a6bc1e | |||
| 90a72130df | |||
| 50e6c8bf9c | |||
| 9326c06c3e | |||
| 9642853d8e | |||
| 2e995b615d | |||
| f0cec23da9 | |||
| 195f18f141 | |||
| e7bfa66512 | |||
| 7f684c5f02 | |||
| 16e4c28516 | |||
| 584389e205 | |||
| 3d660c594b | |||
| 7ce029029d | |||
| 82b744334f | |||
| b0c7cd7da5 | |||
| bd8c52941a | |||
| 48e4b59344 | |||
| 4e3ae49187 | |||
| f9e9edd506 | |||
| 76ccb20dfa | |||
| 9a9b5af7f4 | |||
| 8e40b1fd7e | |||
| 8ad525926a | |||
| 0e485ddb71 | |||
| 9b55fd5e33 | |||
| 38e664b733 | |||
| d8046ac174 | |||
| 561720343b | |||
| d45070eb74 | |||
| ac2d2a0b5d | |||
| 56c926a8ef | |||
| 714e84ea09 | |||
| 103c3c3017 | |||
| 9aff7e622d | |||
| ec7fab2a7a | |||
| 187afdc2cf | |||
| b87ff162c1 | |||
| 8ea6a3d003 | |||
| 53d1603e3e | |||
| 2b37934503 | |||
| d0d1e53acc | |||
| 7453d97c81 | |||
| 4cf91a6edd | |||
| 34408a1bff | |||
| eac9fc1793 | |||
| 6903667946 | |||
| 9d6bc332cb | |||
| 1e2f9fb11a | |||
| b6c004c045 | |||
| 2c06ca511e | |||
| 271effd5d2 | |||
| 5ffea66de0 | |||
| 302eafe08c | |||
| 418a332f03 | |||
| f34bbe0602 | |||
| 5ad94078ed | |||
| 739d933900 | |||
| c02321368a | |||
| 030345fe6b | |||
| b8d4f23a84 | |||
| ba8744befb | |||
| d4037cd953 | |||
| 5fcd76febb | |||
| 47261ed420 | |||
| adb2170d47 | |||
| 6262ac7889 | |||
| ece15e97fb | |||
| 6011173c1f | |||
| 6ec4f160f7 | |||
| 23515526c8 | |||
| e9f97a6f5e | |||
| 2260774a58 | |||
| 1f73ae2323 | |||
| 6fdc933c32 | |||
| 0e3637e5ef | |||
| 069ba37e13 | |||
| ca3d4d09d5 | |||
| 8e3816e2e2 | |||
| d80728d133 | |||
| f854a01925 | |||
| 2eb8ebfeca | |||
| cf641b3199 | |||
| d6a729b5a5 | |||
| 637be0d272 | |||
| d15d7ffb61 | |||
| fc3c92e9a3 | |||
| 344d4fb575 | |||
| 7d16e491c8 | |||
| 050b1993da | |||
| 8f5d59394c | |||
| a832ff27f7 | |||
| 4fc96ec38f | |||
| bba741b4a5 | |||
| aed7a520bd | |||
| 3893f2aa19 | |||
| 51f729a18b | |||
| b2d2019bfd | |||
| 257bccc339 | |||
| 728e0050b3 | |||
| 56ad9fba13 | |||
| 160ab066b2 | |||
| 69c7dbaefe | |||
| acbd58ca71 | |||
| b8d6003339 | |||
| 130b3fa84a | |||
| cb58ef83cc | |||
| f016e65b69 | |||
| b142ff5bed | |||
| e2b8b51db8 | |||
| f44192a114 | |||
| cf11e4d718 | |||
| 468cc7ede9 | |||
| a2aafbf93a | |||
| e033dec988 | |||
| d08b7bb063 | |||
| d93723ab4e | |||
| fe13cfdb08 | |||
| 71ccfcd23d | |||
| fc6ff5dd49 | |||
| 482817fd96 | |||
| c4bb3c7607 | |||
| 7ff1605c21 | |||
| 964dd90704 | |||
| 50e5981c58 | |||
| d5f66151b9 | |||
| 711c020c27 | |||
| 9878f426b1 | |||
| 420ff7af7a | |||
| 270838a549 | |||
| 2e36db4d3d | |||
| 0a129209a5 | |||
| 8b82594512 | |||
| 6dd42ed52d | |||
| 26fd9aa3f9 | |||
| 32044dffe4 | |||
| a0fd62b9d2 | |||
| 1f7a984692 | |||
| d1325b595c | |||
| 900492ae14 | |||
| bdf635e427 | |||
| 1afb6f841c | |||
| ee78457e82 | |||
| 25aec890a0 | |||
| 0f53674d8a | |||
| 4350ade65b | |||
| ff1f02b7e3 | |||
| 53309a9502 | |||
| 4762028a39 | |||
| a9bf5e03f8 |
+196
-19
@@ -2,7 +2,7 @@
|
||||
# If set, it will append a direct ORDER BY <whatever you set> to the SQL query
|
||||
# while listing objects. You can look at the structure of the SQLite database to
|
||||
# see the schema and the column names. Some fields are compound and not visible
|
||||
# directly from the schema (joins). You can check the query in the */list.sql files
|
||||
# directly from the schema (joins). You can check the query in the */list.sql and */base/*.sql files
|
||||
# in the source to see all column names.
|
||||
# The usual syntax for those variables is "<table>"."<column>" [ASC|DESC].
|
||||
# For composite fields (CASE, SUM, COUNT) the syntax is <field>, there is no <table> name.
|
||||
@@ -28,9 +28,15 @@
|
||||
# BK_AUTHENTICATION_KEY=change-this-to-something-random
|
||||
|
||||
# Optional: Pattern of the link to Bricklink for a part. Will be passed to Python .format()
|
||||
# Default: https://www.bricklink.com/v2/catalog/catalogitem.page?P={number}
|
||||
# Supports {part} and {color} parameters. BrickLink part numbers and color IDs are used when available.
|
||||
# Default: https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}
|
||||
# BK_BRICKLINK_LINK_PART_PATTERN=
|
||||
|
||||
# Optional: Pattern of the link to Bricklink for a set. Will be passed to Python .format()
|
||||
# Supports {set_num} parameter. Set numbers in format like '10255-1' are used.
|
||||
# Default: https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num}
|
||||
# BK_BRICKLINK_LINK_SET_PATTERN=
|
||||
|
||||
# Optional: Display Bricklink links wherever applicable
|
||||
# Default: false
|
||||
# BK_BRICKLINK_LINKS=true
|
||||
@@ -55,11 +61,20 @@
|
||||
# Default: 25
|
||||
# BK_DEFAULT_TABLE_PER_PAGE=50
|
||||
|
||||
# Optional: Maximum length for description text in badges before truncating with ellipsis
|
||||
# Default: 15
|
||||
# BK_DESCRIPTION_BADGE_MAX_LENGTH=15
|
||||
|
||||
# Optional: if set up, will add a CORS allow origin restriction to the socket.
|
||||
# Default:
|
||||
# Legacy name: DOMAIN_NAME
|
||||
# BK_DOMAIN_NAME=http://localhost:3333
|
||||
|
||||
# Optional: Format of the timestamp for files on disk (instructions, themes)
|
||||
# Check https://docs.python.org/3/library/time.html#time.strftime for format details
|
||||
# Default: %d/%m/%Y, %H:%M:%S
|
||||
# BK_FILE_DATETIME_FORMAT=%m/%d/%Y, %H:%M
|
||||
|
||||
# Optional: IP address the server will listen on.
|
||||
# Default: 0.0.0.0
|
||||
# BK_HOST=0.0.0.0
|
||||
@@ -91,6 +106,14 @@
|
||||
# Default: false
|
||||
# BK_HIDE_ADMIN=true
|
||||
|
||||
# Optional: Admin sections to expand by default (comma-separated list)
|
||||
# Valid sections: authentication, instructions, image, theme, retired, metadata, owner, purchase_location, status, storage, tag, database
|
||||
# Default: database (maintains original behavior with database section expanded)
|
||||
# Examples:
|
||||
# BK_ADMIN_DEFAULT_EXPANDED_SECTIONS=database,theme
|
||||
# BK_ADMIN_DEFAULT_EXPANDED_SECTIONS=instructions,metadata
|
||||
# BK_ADMIN_DEFAULT_EXPANDED_SECTIONS= (all sections collapsed)
|
||||
|
||||
# Optional: Hide the 'Instructions' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_HIDE_ALL_INSTRUCTIONS=true
|
||||
@@ -99,28 +122,64 @@
|
||||
# Default: false
|
||||
# BK_HIDE_ALL_MINIFIGURES=true
|
||||
|
||||
# Optional: Disable the individual/loose minifigures system. This hides all individual
|
||||
# minifigure UI elements and prevents adding new individual minifigures. The routes remain
|
||||
# accessible so existing individual minifigures can still be viewed. Users who only track
|
||||
# set-based minifigures can use this to simplify the interface. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_DISABLE_INDIVIDUAL_MINIFIGURES=false
|
||||
|
||||
# Optional: Hide the 'Parts' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_HIDE_ALL_PARTS=true
|
||||
|
||||
# Optional: Hide the 'Problems' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# Legacy name: BK_HIDE_MISSING_PARTS
|
||||
# BK_HIDE_ALL_PROBLEMS_PARTS=true
|
||||
|
||||
# Optional: Hide the 'Sets' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_HIDE_ALL_SETS=true
|
||||
|
||||
# Optional: Hide the 'Missing' entry from the menu. Does not disable the route.
|
||||
# Optional: Hide the 'Storages' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_HIDE_MISSING_PARTS=true
|
||||
# BK_HIDE_ALL_STORAGES=true
|
||||
|
||||
# Optional: Hide the 'Statistics' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_HIDE_STATISTICS=true
|
||||
|
||||
# Optional: Hide the 'Instructions' entry in a Set card
|
||||
# Default: false
|
||||
# BK_HIDE_SET_INSTRUCTIONS=true
|
||||
|
||||
# Optional: Hide the 'Damaged' column from the parts table.
|
||||
# Default: false
|
||||
# BK_HIDE_TABLE_DAMAGED_PARTS=true
|
||||
|
||||
# Optional: Hide the 'Missing' column from the parts table.
|
||||
# Default: false
|
||||
# BK_HIDE_TABLE_MISSING_PARTS=true
|
||||
|
||||
# Optional: Hide the 'Checked' column from the parts table.
|
||||
# Default: false
|
||||
# BK_HIDE_TABLE_CHECKED_PARTS=true
|
||||
|
||||
# Optional: Hide the 'Wishlist' entry from the menu. Does not disable the route.
|
||||
# Default: false
|
||||
# BK_HIDE_WISHES=true
|
||||
|
||||
# Optional: Change the default order of minifigures. By default ordered by insertion order.
|
||||
# Note: Minifigures are queried from a combined view that merges both set-based and individual minifigures.
|
||||
# Therefore, column references should use the "combined" table alias.
|
||||
# Useful column names for this option are:
|
||||
# - "minifigures"."fig_num": minifigure ID (fig-xxxxx)
|
||||
# - "minifigures"."name": minifigure name
|
||||
# Default: "minifigures"."name" ASC
|
||||
# BK_MINIFIGURES_DEFAULT_ORDER="minifigures"."name" ASC
|
||||
# - "combined"."figure": minifigure ID (fig-xxxxx)
|
||||
# - "combined"."number": minifigure ID as an integer (xxxxx)
|
||||
# - "combined"."name": minifigure name
|
||||
# - "combined"."rowid": insertion order (for both set and individual minifigures)
|
||||
# Default: "combined"."name" ASC
|
||||
# BK_MINIFIGURES_DEFAULT_ORDER="combined"."name" ASC
|
||||
|
||||
# Optional: Folder where to store the minifigures images, relative to the '/app/static/' folder
|
||||
# Default: minifigs
|
||||
@@ -133,23 +192,81 @@
|
||||
# BK_NO_THREADED_SOCKET=true
|
||||
|
||||
# Optional: Change the default order of parts. By default ordered by insertion order.
|
||||
# Note: Parts are queried from a combined view that merges both set-based and individual minifigure parts.
|
||||
# Some columns use the "combined" table alias for fields from the merged view.
|
||||
# Useful column names for this option are:
|
||||
# - "inventory"."part_num": part number
|
||||
# - "inventory"."name": part name
|
||||
# - "inventory"."color_name": part color name
|
||||
# - "inventory"."is_spare": part is a spare part
|
||||
# - "combined"."part": part number
|
||||
# - "combined"."spare": part is a spare part (use "combined" not "bricktracker_parts")
|
||||
# - "rebrickable_parts"."name": part name
|
||||
# - "rebrickable_parts"."color_name": part color name
|
||||
# - "total_missing": number of missing parts
|
||||
# Default: "inventory"."name" ASC, "inventory"."color_name" ASC, "inventory"."is_spare" ASC
|
||||
# BK_PARTS_DEFAULT_ORDER="total_missing" DESC, "inventory"."name" ASC
|
||||
# Default: "rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "combined"."spare" ASC
|
||||
# BK_PARTS_DEFAULT_ORDER="total_missing" DESC, "rebrickable_parts"."name" ASC
|
||||
|
||||
# Optional: Folder where to store the parts images, relative to the '/app/static/' folder
|
||||
# Default: parts
|
||||
# BK_PARTS_FOLDER=parts
|
||||
|
||||
# Optional: Enable server-side pagination for individual pages (recommended for large collections)
|
||||
# When enabled, pages use server-side pagination with configurable page sizes
|
||||
# When disabled, pages load all data at once with instant client-side search
|
||||
# Default: false for all
|
||||
# BK_SETS_SERVER_SIDE_PAGINATION=true
|
||||
# BK_PARTS_SERVER_SIDE_PAGINATION=true
|
||||
# BK_MINIFIGURES_SERVER_SIDE_PAGINATION=true
|
||||
# BK_PROBLEMS_SERVER_SIDE_PAGINATION=true
|
||||
|
||||
# Optional: Number of parts to show per page on desktop devices (when server-side pagination is enabled)
|
||||
# Default: 10
|
||||
# BK_PARTS_PAGINATION_SIZE_DESKTOP=10
|
||||
|
||||
# Optional: Number of parts to show per page on mobile devices (when server-side pagination is enabled)
|
||||
# Default: 5
|
||||
# BK_PARTS_PAGINATION_SIZE_MOBILE=5
|
||||
|
||||
# Optional: Number of sets to show per page on desktop devices (when server-side pagination is enabled)
|
||||
# Should be divisible by 4 for grid layout. Default: 12
|
||||
# BK_SETS_PAGINATION_SIZE_DESKTOP=12
|
||||
|
||||
# Optional: Number of sets to show per page on mobile devices (when server-side pagination is enabled)
|
||||
# Default: 4
|
||||
# BK_SETS_PAGINATION_SIZE_MOBILE=4
|
||||
|
||||
# Optional: Number of minifigures to show per page on desktop devices (when server-side pagination is enabled)
|
||||
# Default: 10
|
||||
# BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP=10
|
||||
|
||||
# Optional: Number of minifigures to show per page on mobile devices (when server-side pagination is enabled)
|
||||
# Default: 5
|
||||
# BK_MINIFIGURES_PAGINATION_SIZE_MOBILE=5
|
||||
|
||||
# Optional: Number of problems to show per page on desktop devices (when server-side pagination is enabled)
|
||||
# Default: 10
|
||||
# BK_PROBLEMS_PAGINATION_SIZE_DESKTOP=10
|
||||
|
||||
# Optional: Number of problems to show per page on mobile devices (when server-side pagination is enabled)
|
||||
# Default: 5
|
||||
# BK_PROBLEMS_PAGINATION_SIZE_MOBILE=5
|
||||
|
||||
# Optional: Port the server will listen on.
|
||||
# Default: 3333
|
||||
# BK_PORT=3333
|
||||
|
||||
# Optional: Format of the timestamp for purchase dates
|
||||
# Check https://docs.python.org/3/library/time.html#time.strftime for format details
|
||||
# Default: %d/%m/%Y
|
||||
# BK_PURCHASE_DATE_FORMAT=%m/%d/%Y
|
||||
|
||||
# Optional: Currency to display for purchase prices.
|
||||
# Default: €
|
||||
# BK_PURCHASE_CURRENCY=£
|
||||
|
||||
# Optional: Change the default order of purchase locations. By default ordered by insertion order.
|
||||
# Useful column names for this option are:
|
||||
# - "bricktracker_metadata_purchase_locations"."name" ASC: storage name
|
||||
# Default: "bricktracker_metadata_purchase_locations"."name" ASC
|
||||
# BK_PURCHASE_LOCATION_DEFAULT_ORDER="bricktracker_metadata_purchase_locations"."name" ASC
|
||||
|
||||
# Optional: Shuffle the lists on the front page.
|
||||
# Default: false
|
||||
# Legacy name: RANDOM
|
||||
@@ -171,20 +288,47 @@
|
||||
# BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE=
|
||||
|
||||
# Optional: Pattern of the link to Rebrickable for a minifigure. Will be passed to Python .format()
|
||||
# Default: https://rebrickable.com/minifigs/{number}
|
||||
# Default: https://rebrickable.com/minifigs/{figure}
|
||||
# BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN=
|
||||
|
||||
# Optional: Pattern of the link to Rebrickable for a part. Will be passed to Python .format()
|
||||
# Default: https://rebrickable.com/parts/{number}/_/{color}
|
||||
# Default: https://rebrickable.com/parts/{part}/_/{color}
|
||||
# BK_REBRICKABLE_LINK_PART_PATTERN=
|
||||
|
||||
# Optional: Pattern of the link to Rebrickable for instructions. Will be passed to Python .format()
|
||||
# Default: https://rebrickable.com/instructions/{path}
|
||||
# BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN=
|
||||
|
||||
# Optional: User-Agent to use when querying Rebrickable outside of the Rebrick python library
|
||||
# Default: 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
|
||||
# BK_REBRICKABLE_USER_AGENT=
|
||||
# Optional: User-Agent to use when querying Rebrickable and Peeron outside of the Rebrick python library
|
||||
# Default: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
|
||||
# BK_USER_AGENT=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
|
||||
|
||||
# Legacy: User-Agent for Rebrickable (use BK_USER_AGENT instead)
|
||||
# BK_REBRICKABLE_USER_AGENT=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36
|
||||
|
||||
# Optional: Delay in milliseconds between Peeron page downloads to avoid being potentially blocked
|
||||
# Default: 1000
|
||||
# BK_PEERON_DOWNLOAD_DELAY=1000
|
||||
|
||||
# Optional: Minimum image size (width/height) for valid Peeron instruction pages
|
||||
# Images smaller than this are considered error placeholders and will be rejected
|
||||
# Default: 100
|
||||
# BK_PEERON_MIN_IMAGE_SIZE=100
|
||||
|
||||
# Optional: Pattern for Peeron instruction page URLs. Will be passed to Python .format()
|
||||
# Supports {set_number} and {version_number} parameters
|
||||
# Default: http://peeron.com/scans/{set_number}-{version_number}
|
||||
# BK_PEERON_INSTRUCTION_PATTERN=
|
||||
|
||||
# Optional: Pattern for Peeron thumbnail URLs. Will be passed to Python .format()
|
||||
# Supports {set_number} and {version_number} parameters
|
||||
# Default: http://belay.peeron.com/thumbs/{set_number}-{version_number}/
|
||||
# BK_PEERON_THUMBNAIL_PATTERN=
|
||||
|
||||
# Optional: Pattern for Peeron scan URLs. Will be passed to Python .format()
|
||||
# Supports {set_number} and {version_number} parameters
|
||||
# Default: http://belay.peeron.com/scans/{set_number}-{version_number}/
|
||||
# BK_PEERON_SCAN_PATTERN=
|
||||
|
||||
# Optional: Display Rebrickable links wherever applicable
|
||||
# Default: false
|
||||
@@ -221,6 +365,24 @@
|
||||
# Default: sets
|
||||
# BK_SETS_FOLDER=sets
|
||||
|
||||
# Optional: Enable set consolidation/grouping on the main sets page
|
||||
# When enabled, multiple copies of the same set are grouped together showing instance count
|
||||
# When disabled, each set copy is displayed individually (original behavior)
|
||||
# Default: false
|
||||
# BK_SETS_CONSOLIDATION=true
|
||||
|
||||
# Optional: Make the grid filters displayed by default, rather than collapsed
|
||||
# Default: false
|
||||
# BK_SHOW_GRID_FILTERS=true
|
||||
|
||||
# Optional: Make the grid sort displayed by default, rather than collapsed
|
||||
# Default: false
|
||||
# BK_SHOW_GRID_SORT=true
|
||||
|
||||
# Optional: Show duplicate filter button on sets page
|
||||
# Default: true
|
||||
# BK_SHOW_SETS_DUPLICATE_FILTER=true
|
||||
|
||||
# Optional: Skip saving or displaying spare parts
|
||||
# Default: false
|
||||
# BK_SKIP_SPARE_PARTS=true
|
||||
@@ -233,6 +395,12 @@
|
||||
# Default: /bricksocket/
|
||||
# BK_SOCKET_PATH=custompath
|
||||
|
||||
# Optional: Change the default order of storages. By default ordered by insertion order.
|
||||
# Useful column names for this option are:
|
||||
# - "bricktracker_metadata_storages"."name" ASC: storage name
|
||||
# Default: "bricktracker_metadata_storages"."name" ASC
|
||||
# BK_STORAGE_DEFAULT_ORDER="bricktracker_metadata_storages"."name" ASC
|
||||
|
||||
# Optional: URL to the themes.csv.gz on Rebrickable
|
||||
# Default: https://cdn.rebrickable.com/media/downloads/themes.csv.gz
|
||||
# BK_THEMES_FILE_URL=
|
||||
@@ -260,3 +428,12 @@
|
||||
# - "bricktracker_wishes"."number_of_parts": set number of parts
|
||||
# Default: "bricktracker_wishes"."rowid" DESC
|
||||
# BK_WISHES_DEFAULT_ORDER="bricktracker_wishes"."set" DESC
|
||||
|
||||
# Optional: Show collection growth charts on the statistics page
|
||||
# Default: true
|
||||
# BK_STATISTICS_SHOW_CHARTS=false
|
||||
|
||||
# Optional: Default state of statistics page sections (expanded or collapsed)
|
||||
# When true, all sections start expanded. When false, all sections start collapsed.
|
||||
# Default: true
|
||||
# BK_STATISTICS_DEFAULT_EXPANDED=false
|
||||
|
||||
+12
@@ -21,6 +21,18 @@ static/sets/
|
||||
# Temporary
|
||||
*.csv
|
||||
/local/
|
||||
run_local.sh
|
||||
settings.local.json
|
||||
|
||||
# Apple idiocy
|
||||
.DS_Store
|
||||
|
||||
# Documentation
|
||||
docusaurus/
|
||||
vitepress/
|
||||
|
||||
# Local data
|
||||
offline/
|
||||
TODO.md
|
||||
run-local.sh
|
||||
test-server.sh
|
||||
|
||||
+257
@@ -1,5 +1,262 @@
|
||||
# Changelog
|
||||
|
||||
## Unreleased
|
||||
|
||||
### 1.3
|
||||
|
||||
- Add individual pagination control system per entity type
|
||||
- `BK_SETS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for sets
|
||||
- `BK_PARTS_SERVER_SIDE_PAGINATION`: Enable/disable pagination for parts
|
||||
- `BK_MINIFIGURES_SERVER_SIDE_PAGINATION`: Enable/disable pagination for minifigures
|
||||
- Device-specific pagination sizes (desktop/mobile) for each entity type
|
||||
- Supports search, filtering, and sorting in both server-side and client-side modes
|
||||
- Consolidated duplicate code across parts.js, problems.js, and minifigures.js
|
||||
- Created shared functions in collapsible-state.js for common operations
|
||||
- Fixed dynamic sort icons across all pages
|
||||
- Sort icons now properly toggle between ascending/descending states
|
||||
- Improved DataTable integration
|
||||
- Disabled column header sorting when server-side pagination is enabled
|
||||
- Prevents conflicting sort mechanisms between DataTable and server-side sorting
|
||||
- Enhanced color dropdown functionality
|
||||
- Automatic merging of duplicate color entries with same color_id
|
||||
- Keeps entries with valid RGB data, removes entries with None/empty RGB
|
||||
- Preserves selection state during dropdown consolidation
|
||||
- Consistent search behavior (instant for client-side, Enter key for server-side)
|
||||
- Mobile-friendly pagination navigation
|
||||
- Add Peeron instructions integration
|
||||
- Full image caching system with automatic thumbnail generation
|
||||
- Optimized HTTP calls by downloading full images once and generating thumbnails locally
|
||||
- Automatic cache cleanup after PDF generation to save disk space
|
||||
- Add parts checking/inventory system
|
||||
- New "Checked" column in parts tables for tracking inventory progress
|
||||
- Checkboxes to mark parts as verified during set walkthrough
|
||||
- `BK_HIDE_TABLE_CHECKED_PARTS`: Environment variable to hide checked column
|
||||
- Add set consolidation/grouping functionality
|
||||
- Automatic grouping of duplicate sets on main sets page
|
||||
- Shows instance count with stack icon badge (e.g., "3 copies")
|
||||
- Expandable drawer interface to view all set copies individually
|
||||
- Full set cards for each instance with all badges, statuses, and functionality
|
||||
- `BK_SETS_CONSOLIDATION`: Environment variable to enable/disable consolidation (default: false)
|
||||
- Backwards compatible - when disabled, behaves exactly like original individual view
|
||||
- Improved theme filtering: handles duplicate theme names correctly
|
||||
- Fixed set number sorting: proper numeric sorting in both ascending and descending order
|
||||
- Mixed status indicators for consolidated sets: three-state checkboxes (unchecked/partial/checked) with count badges
|
||||
- Template logic handles three states: none (0/2), all (2/2), partial (1/2) with visual indicators
|
||||
- Purple overlay styling for partial states, disabled checkboxes for read-only consolidated status display
|
||||
- Individual sets maintain full interactive checkbox functionality
|
||||
- Add comprehensive statistics system (#91)
|
||||
- New Statistics page with collection analytics
|
||||
- Financial overview: total cost, average price, price range, investment tracking
|
||||
- Collection metrics: total sets, unique sets, parts count, minifigures count
|
||||
- Theme distribution statistics with clickable drill-down to filtered sets
|
||||
- Storage location statistics showing sets per location with value calculations
|
||||
- Purchase location analytics with spending patterns and date ranges
|
||||
- Problem tracking: missing and damaged parts statistics
|
||||
- Clickable numbers throughout statistics that filter to relevant sets
|
||||
- `BK_HIDE_STATISTICS`: Environment variable to hide statistics menu item
|
||||
- Year-based analytics: Sets by release year and purchases by year
|
||||
- Sets by Release Year: Shows collection distribution across LEGO release years
|
||||
- Purchases by Year: Tracks spending patterns and acquisition timeline
|
||||
- Year summary with peak collection/spending years and timeline insights
|
||||
- Enhanced statistics interface and functionality
|
||||
- Collapsible sections: All statistics sections have clickable headers to expand/collapse
|
||||
- Collection growth charts: Line charts showing sets, parts, and minifigures over time
|
||||
- Configuration options: `BK_STATISTICS_SHOW_CHARTS` and `BK_STATISTICS_DEFAULT_EXPANDED` environment variables
|
||||
- Add configurable admin page section expansion
|
||||
- `BK_ADMIN_DEFAULT_EXPANDED_SECTIONS`: Environment variable to specify which sections expand by default
|
||||
- Accepts comma-separated list of section names (e.g., "database,theme,instructions")
|
||||
- Valid sections: authentication, instructions, image, theme, retired, metadata, owner, purchase_location, status, storage, tag, database
|
||||
- URL parameters take priority over configuration (e.g., `?open_database=1`)
|
||||
- Database section expanded by default to maintain original behavior
|
||||
- Smart metadata handling: sub-section expansion automatically expands parent metadata section
|
||||
- Add duplicate sets filter functionality
|
||||
- New filter button on Sets page to show only duplicate/consolidated sets
|
||||
- `BK_SHOW_SETS_DUPLICATE_FILTER`: Environment variable to show/hide the filter button (default: true)
|
||||
- Works with both server-side and client-side pagination modes
|
||||
- Consolidated mode: Shows sets that have multiple instances
|
||||
- Non-consolidated mode: Shows sets that appear multiple times in collection
|
||||
- Add BrickLink links for sets
|
||||
- BrickLink badge links now appear on set cards and set details pages alongside Rebrickable links
|
||||
- `BK_BRICKLINK_LINK_SET_PATTERN`: New environment variable for BrickLink set URL pattern (default: https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num})
|
||||
- Controlled by existing `BK_BRICKLINK_LINKS` environment variable
|
||||
- Add live environment variable configuration management system
|
||||
- Configuration Management interface in admin panel with live preview and badge system
|
||||
- Live settings: Can be changed without application restart (menu visibility, table display, pagination, features)
|
||||
- Static settings: Require restart but can be edited and saved to .env file (authentication, server, database, API keys)
|
||||
- Advanced badge system showing value status: True/False for booleans, Set/Default/Unset for other values, Changed indicator
|
||||
- Live API endpoints: `/admin/api/config/update` for immediate changes, `/admin/api/config/update-static` for .env updates
|
||||
- Form pre-population with current values and automatic page reload after successful live updates
|
||||
- **BREAKING CHANGE**: Default minifigures folder path changed from `minifigs` to `minifigures`
|
||||
- Impact: Users who relied on the default `BK_MINIFIGURES_FOLDER` value (without explicitly setting it) will need to either:
|
||||
1. Set `BK_MINIFIGURES_FOLDER=minifigs` in their environment to maintain existing behavior, or
|
||||
2. Rename their existing `minifigs` folder to `minifigures`
|
||||
- No impact: Users who already have `BK_MINIFIGURES_FOLDER` explicitly configured
|
||||
- Improved consistency across documentation and Docker configurations
|
||||
- Add performance optimization
|
||||
- SQLite WAL Mode:
|
||||
- Increased cache size to 10,000 pages (~40MB) for faster query execution
|
||||
- Set temp_store to memory for accelerated temporary operations
|
||||
- Enabled foreign key constraints and optimized synchronous mode
|
||||
- Added ANALYZE for improved query planning and statistics
|
||||
- Database Indexes (Migration 0019):
|
||||
- High-impact composite index for problem parts aggregation (`idx_bricktracker_parts_id_missing_damaged`)
|
||||
- Parts lookup optimization (`idx_bricktracker_parts_part_color_spare`)
|
||||
- Set storage filtering (`idx_bricktracker_sets_set_storage`)
|
||||
- Search optimization with case-insensitive indexes (`idx_rebrickable_sets_name_lower`, `idx_rebrickable_parts_name_lower`)
|
||||
- Year and theme filtering optimization (`idx_rebrickable_sets_year`, `idx_rebrickable_sets_theme_id`)
|
||||
- Additional indexes for purchase dates, quantities, sorting, and minifigures aggregation
|
||||
- Statistics Query Optimization:
|
||||
- Replaced separate subqueries with efficient CTEs (Common Table Expressions)
|
||||
- Consolidated aggregations for set, part, minifigure, and financial statistics
|
||||
|
||||
### 1.2.4
|
||||
|
||||
> **Warning**
|
||||
> To use the new BrickLink color parameter in URLs, update your `.env` file:
|
||||
> `BK_BRICKLINK_LINK_PART_PATTERN=https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}`
|
||||
|
||||
- Add BrickLink color and part number support for accurate BrickLink URLs
|
||||
- Database migrations to store BrickLink color ID, color name, and part number
|
||||
- Updated Rebrickable API integration to extract BrickLink data from external_ids
|
||||
- Enhanced BrickLink URL generation with proper part number fallback
|
||||
- Extended admin set refresh to detect and track missing BrickLink data
|
||||
|
||||
## 1.2.3
|
||||
|
||||
Added search/filter/sort options to `parts` and `minifigures`.
|
||||
|
||||
## 1.2.2
|
||||
|
||||
Fix legibility of "Damaged" and "Missing" fields for tiny screen by reducing horizontal padding
|
||||
Fixed instructions download from Rebrickable
|
||||
|
||||
## 1.2.2:
|
||||
|
||||
This release fixes a bug where orphaned parts in the `inventory` table are blocking the database upgrade.
|
||||
|
||||
## 1.2.1:
|
||||
|
||||
This release fixes a bug where you could not add a set if no metadata was configured.
|
||||
|
||||
## 1.2.0:
|
||||
|
||||
> **Warning**
|
||||
> "Missing" part has been renamed to "Problems" to accommodate missing and damaged parts.
|
||||
> The associated environment variables have changed names (the old names are still valid)
|
||||
|
||||
### Environment
|
||||
|
||||
- Renamed: `BK_HIDE_MISSING_PARTS` -> `BK_HIDE_ALL_PROBLEMS_PARTS`
|
||||
- Added: `BK_HIDE_TABLE_MISSING_PARTS`, hide the Missing column in all tables
|
||||
- Added: `BK_HIDE_TABLE_DAMAGED_PARTS`, hide the Damaged column in all tables
|
||||
- Added: `BK_SHOW_GRID_SORT`, show the sort options on the grid by default
|
||||
- Added: `BK_SHOW_GRID_FILTERS`, show the filter options on the grid by default
|
||||
- Added: `BK_HIDE_ALL_STORAGES`, hide the "Storages" menu entry
|
||||
- Added: `BK_STORAGE_DEFAULT_ORDER`, ordering of storages
|
||||
- Added: `BK_PURCHASE_LOCATION_DEFAULT_ORDER`, ordering of purchase locations
|
||||
- Added: `BK_PURCHASE_CURRENCY`, currency to display for purchase prices
|
||||
- Added: `BK_PURCHASE_DATE_FORMAT`, date format for purchase dates
|
||||
- Documented: `BK_FILE_DATETIME_FORMAT`, date format for files on disk (instructions, theme)
|
||||
|
||||
### Code
|
||||
|
||||
- Changer
|
||||
- Revert the checked state of a checkbox if an error occurred
|
||||
|
||||
- Form
|
||||
- Migrate missing input fields to BrickChanger
|
||||
|
||||
- General cleanup
|
||||
|
||||
- Metadata
|
||||
- Underlying class to implement more metadata-like features
|
||||
|
||||
- Minifigure
|
||||
- Deduplicate
|
||||
- Compute number of parts
|
||||
|
||||
- Parts
|
||||
- Damaged parts
|
||||
|
||||
- Sets
|
||||
- Refresh data from Rebrickable
|
||||
- Fix missing @login_required for set deletion
|
||||
- Ownership
|
||||
- Tags
|
||||
- Storage
|
||||
- Purchase location, date, price
|
||||
|
||||
- Storage
|
||||
- Storage content and list
|
||||
|
||||
- Socket
|
||||
- Add decorator for rebrickable, authenticated and threaded socket actions
|
||||
|
||||
- SQL
|
||||
- Allow for advanced migration scenarios through companion python files
|
||||
- Add a bunch of the requested fields into the database for future implementation
|
||||
|
||||
- Wish
|
||||
- Requester
|
||||
|
||||
### UI
|
||||
|
||||
- Add
|
||||
- Allow adding or bulk adding by pressing Enter in the input field
|
||||
|
||||
- Admin
|
||||
- Grey out legacy tables in the database view
|
||||
- Checkboxes renamed to Set statuses
|
||||
- List of sets that may need to be refreshed
|
||||
|
||||
- Cards
|
||||
- Use macros for badge in the card header
|
||||
|
||||
- Form
|
||||
- Add a clear button for dynamic text inputs
|
||||
- Add error message in a tooltip for dynamic inputs
|
||||
|
||||
- Minifigure
|
||||
- Display number of parts
|
||||
|
||||
- Parts
|
||||
- Use Rebrickable URL if stored (+ color code)
|
||||
- Display color and transparency
|
||||
- Display if print of another part
|
||||
- Display prints using the same base
|
||||
- Damaged parts
|
||||
- Display same parts using a different color
|
||||
|
||||
- Sets
|
||||
- Add a flag to hide instructions in a set
|
||||
- Make checkbox clickable on the whole width of the card
|
||||
- Management
|
||||
- Ownership
|
||||
- Tags
|
||||
- Refresh
|
||||
- Storage
|
||||
- Purchase location, date, price
|
||||
|
||||
- Sets grid
|
||||
- Collapsible controls depending on screen size
|
||||
- Manually collapsible filters (with configuration variable for default state)
|
||||
- Manually collapsible sort (with configuration variable for default state)
|
||||
- Clear search bar
|
||||
|
||||
- Storage
|
||||
- Storage list
|
||||
- Storage content
|
||||
|
||||
- Wish
|
||||
- Requester
|
||||
|
||||
## 1.1.1: PDF Instructions Download
|
||||
|
||||
### Instructions
|
||||
|
||||
- Added buttons for instructions download from Rebrickable
|
||||
|
||||
|
||||
## 1.1.0: Deduped sets, custom checkboxes and database upgrade
|
||||
|
||||
### Database
|
||||
|
||||
@@ -5,6 +5,9 @@ WORKDIR /app
|
||||
# Bricktracker
|
||||
COPY . .
|
||||
|
||||
# Fix line endings and set executable permissions for entrypoint script
|
||||
RUN sed -i 's/\r$//' entrypoint.sh && chmod +x entrypoint.sh
|
||||
|
||||
# Python library requirements
|
||||
RUN pip --no-cache-dir install -r requirements.txt
|
||||
|
||||
|
||||
@@ -18,7 +18,9 @@ A web application for organizing and tracking LEGO sets, parts, and minifigures.
|
||||
|
||||
Use the provided [compose.yaml](compose.yaml) file.
|
||||
|
||||
See [setup](docs/setup.md).
|
||||
See [Quickstart](docs/quickstart.md) to get up and running right away.
|
||||
|
||||
See [Setup](docs/setup.md) for a more setup guide.
|
||||
|
||||
## Usage
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# This need to be first
|
||||
import eventlet
|
||||
eventlet.monkey_patch()
|
||||
import gevent.monkey
|
||||
gevent.monkey.patch_all()
|
||||
|
||||
import logging # noqa: E402
|
||||
|
||||
|
||||
+24
-3
@@ -10,22 +10,31 @@ from bricktracker.configuration_list import BrickConfigurationList
|
||||
from bricktracker.login import LoginManager
|
||||
from bricktracker.navbar import Navbar
|
||||
from bricktracker.sql import close
|
||||
from bricktracker.template_filters import replace_query_filter
|
||||
from bricktracker.version import __version__
|
||||
from bricktracker.views.add import add_page
|
||||
from bricktracker.views.admin.admin import admin_page
|
||||
from bricktracker.views.admin.checkbox import admin_checkbox_page
|
||||
from bricktracker.views.admin.database import admin_database_page
|
||||
from bricktracker.views.admin.image import admin_image_page
|
||||
from bricktracker.views.admin.instructions import admin_instructions_page
|
||||
from bricktracker.views.admin.owner import admin_owner_page
|
||||
from bricktracker.views.admin.purchase_location import admin_purchase_location_page # noqa: E501
|
||||
from bricktracker.views.admin.retired import admin_retired_page
|
||||
from bricktracker.views.admin.set import admin_set_page
|
||||
from bricktracker.views.admin.status import admin_status_page
|
||||
from bricktracker.views.admin.storage import admin_storage_page
|
||||
from bricktracker.views.admin.tag import admin_tag_page
|
||||
from bricktracker.views.admin.theme import admin_theme_page
|
||||
from bricktracker.views.error import error_404
|
||||
from bricktracker.views.index import index_page
|
||||
from bricktracker.views.instructions import instructions_page
|
||||
from bricktracker.views.login import login_page
|
||||
from bricktracker.views.individual_minifigure import individual_minifigure_page
|
||||
from bricktracker.views.minifigure import minifigure_page
|
||||
from bricktracker.views.part import part_page
|
||||
from bricktracker.views.set import set_page
|
||||
from bricktracker.views.statistics import statistics_page
|
||||
from bricktracker.views.storage import storage_page
|
||||
from bricktracker.views.wish import wish_page
|
||||
|
||||
|
||||
@@ -53,7 +62,8 @@ def setup_app(app: Flask) -> None:
|
||||
# Setup the login manager
|
||||
LoginManager(app)
|
||||
|
||||
# I don't know :-)
|
||||
# Configure proxy header handling for reverse proxy deployments (nginx, Apache, etc.)
|
||||
# This ensures proper client IP detection and HTTPS scheme recognition
|
||||
app.wsgi_app = ProxyFix(
|
||||
app.wsgi_app,
|
||||
x_for=1,
|
||||
@@ -71,18 +81,26 @@ def setup_app(app: Flask) -> None:
|
||||
app.register_blueprint(index_page)
|
||||
app.register_blueprint(instructions_page)
|
||||
app.register_blueprint(login_page)
|
||||
app.register_blueprint(individual_minifigure_page)
|
||||
app.register_blueprint(minifigure_page)
|
||||
app.register_blueprint(part_page)
|
||||
app.register_blueprint(set_page)
|
||||
app.register_blueprint(statistics_page)
|
||||
app.register_blueprint(storage_page)
|
||||
app.register_blueprint(wish_page)
|
||||
|
||||
# Register admin routes
|
||||
app.register_blueprint(admin_page)
|
||||
app.register_blueprint(admin_checkbox_page)
|
||||
app.register_blueprint(admin_database_page)
|
||||
app.register_blueprint(admin_image_page)
|
||||
app.register_blueprint(admin_instructions_page)
|
||||
app.register_blueprint(admin_retired_page)
|
||||
app.register_blueprint(admin_owner_page)
|
||||
app.register_blueprint(admin_purchase_location_page)
|
||||
app.register_blueprint(admin_set_page)
|
||||
app.register_blueprint(admin_status_page)
|
||||
app.register_blueprint(admin_storage_page)
|
||||
app.register_blueprint(admin_tag_page)
|
||||
app.register_blueprint(admin_theme_page)
|
||||
|
||||
# An helper to make global variables available to the
|
||||
@@ -109,6 +127,9 @@ def setup_app(app: Flask) -> None:
|
||||
# Version
|
||||
g.version = __version__
|
||||
|
||||
# Register custom Jinja2 filters
|
||||
app.jinja_env.filters['replace_query'] = replace_query_filter
|
||||
|
||||
# Make sure all connections are closed at the end
|
||||
@app.teardown_request
|
||||
def teardown_request(_: BaseException | None) -> None:
|
||||
|
||||
+45
-7
@@ -10,12 +10,15 @@ from typing import Any, Final
|
||||
CONFIG: Final[list[dict[str, Any]]] = [
|
||||
{'n': 'AUTHENTICATION_PASSWORD', 'd': ''},
|
||||
{'n': 'AUTHENTICATION_KEY', 'd': ''},
|
||||
{'n': 'BRICKLINK_LINK_PART_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?P={number}'}, # noqa: E501
|
||||
{'n': 'BRICKLINK_LINK_PART_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?P={part}&C={color}'}, # noqa: E501
|
||||
{'n': 'BRICKLINK_LINK_SET_PATTERN', 'd': 'https://www.bricklink.com/v2/catalog/catalogitem.page?S={set_num}'}, # noqa: E501
|
||||
{'n': 'BRICKLINK_LINKS', 'c': bool},
|
||||
{'n': 'DATABASE_PATH', 'd': './app.db'},
|
||||
{'n': 'DATABASE_TIMESTAMP_FORMAT', 'd': '%Y-%m-%d-%H-%M-%S'},
|
||||
{'n': 'DEBUG', 'c': bool},
|
||||
{'n': 'DEFAULT_TABLE_PER_PAGE', 'd': 25, 'c': int},
|
||||
{'n': 'DESCRIPTION_BADGE_MAX_LENGTH', 'd': 15, 'c': int},
|
||||
{'n': 'DISABLE_INDIVIDUAL_MINIFIGURES', 'c': bool},
|
||||
{'n': 'DOMAIN_NAME', 'e': 'DOMAIN_NAME', 'd': ''},
|
||||
{'n': 'FILE_DATETIME_FORMAT', 'd': '%d/%m/%Y, %H:%M:%S'},
|
||||
{'n': 'HOST', 'd': '0.0.0.0'},
|
||||
@@ -25,38 +28,73 @@ CONFIG: Final[list[dict[str, Any]]] = [
|
||||
{'n': 'HIDE_ADD_SET', 'c': bool},
|
||||
{'n': 'HIDE_ADD_BULK_SET', 'c': bool},
|
||||
{'n': 'HIDE_ADMIN', 'c': bool},
|
||||
{'n': 'ADMIN_DEFAULT_EXPANDED_SECTIONS', 'd': ['database'], 'c': list},
|
||||
{'n': 'HIDE_ALL_INSTRUCTIONS', 'c': bool},
|
||||
{'n': 'HIDE_ALL_MINIFIGURES', 'c': bool},
|
||||
{'n': 'HIDE_ALL_PARTS', 'c': bool},
|
||||
{'n': 'HIDE_ALL_PROBLEMS_PARTS', 'e': 'BK_HIDE_MISSING_PARTS', 'c': bool},
|
||||
{'n': 'HIDE_ALL_SETS', 'c': bool},
|
||||
{'n': 'HIDE_MISSING_PARTS', 'c': bool},
|
||||
{'n': 'HIDE_ALL_STORAGES', 'c': bool},
|
||||
{'n': 'HIDE_STATISTICS', 'c': bool},
|
||||
{'n': 'HIDE_SET_INSTRUCTIONS', 'c': bool},
|
||||
{'n': 'HIDE_TABLE_DAMAGED_PARTS', 'c': bool},
|
||||
{'n': 'HIDE_TABLE_MISSING_PARTS', 'c': bool},
|
||||
{'n': 'HIDE_TABLE_CHECKED_PARTS', 'c': bool},
|
||||
{'n': 'HIDE_WISHES', 'c': bool},
|
||||
{'n': 'MINIFIGURES_DEFAULT_ORDER', 'd': '"minifigures"."name" ASC'},
|
||||
{'n': 'MINIFIGURES_FOLDER', 'd': 'minifigs', 's': True},
|
||||
{'n': 'MINIFIGURES_DEFAULT_ORDER', 'd': '"combined"."name" ASC'}, # noqa: E501
|
||||
{'n': 'MINIFIGURES_FOLDER', 'd': 'minifigures', 's': True},
|
||||
{'n': 'MINIFIGURES_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
|
||||
{'n': 'MINIFIGURES_PAGINATION_SIZE_MOBILE', 'd': 5, 'c': int},
|
||||
{'n': 'MINIFIGURES_SERVER_SIDE_PAGINATION', 'c': bool},
|
||||
{'n': 'NO_THREADED_SOCKET', 'c': bool},
|
||||
{'n': 'PARTS_DEFAULT_ORDER', 'd': '"inventory"."name" ASC, "inventory"."color_name" ASC, "inventory"."is_spare" ASC'}, # noqa: E501
|
||||
{'n': 'PARTS_SERVER_SIDE_PAGINATION', 'c': bool},
|
||||
{'n': 'SETS_SERVER_SIDE_PAGINATION', 'c': bool},
|
||||
{'n': 'PARTS_DEFAULT_ORDER', 'd': '"rebrickable_parts"."name" ASC, "rebrickable_parts"."color_name" ASC, "combined"."spare" ASC'}, # noqa: E501
|
||||
{'n': 'PARTS_FOLDER', 'd': 'parts', 's': True},
|
||||
{'n': 'PARTS_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
|
||||
{'n': 'PARTS_PAGINATION_SIZE_MOBILE', 'd': 5, 'c': int},
|
||||
{'n': 'PROBLEMS_PAGINATION_SIZE_DESKTOP', 'd': 10, 'c': int},
|
||||
{'n': 'PROBLEMS_PAGINATION_SIZE_MOBILE', 'd': 10, 'c': int},
|
||||
{'n': 'PROBLEMS_SERVER_SIDE_PAGINATION', 'c': bool},
|
||||
{'n': 'SETS_PAGINATION_SIZE_DESKTOP', 'd': 12, 'c': int},
|
||||
{'n': 'SETS_PAGINATION_SIZE_MOBILE', 'd': 4, 'c': int},
|
||||
{'n': 'PORT', 'd': 3333, 'c': int},
|
||||
{'n': 'PURCHASE_DATE_FORMAT', 'd': '%d/%m/%Y'},
|
||||
{'n': 'PURCHASE_CURRENCY', 'd': '€'},
|
||||
{'n': 'PURCHASE_LOCATION_DEFAULT_ORDER', 'd': '"bricktracker_metadata_purchase_locations"."name" ASC'}, # noqa: E501
|
||||
{'n': 'RANDOM', 'e': 'RANDOM', 'c': bool},
|
||||
{'n': 'REBRICKABLE_API_KEY', 'e': 'REBRICKABLE_API_KEY', 'd': ''},
|
||||
{'n': 'REBRICKABLE_IMAGE_NIL', 'd': 'https://rebrickable.com/static/img/nil.png'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_IMAGE_NIL_MINIFIGURE', 'd': 'https://rebrickable.com/static/img/nil_mf.jpg'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_LINK_MINIFIGURE_PATTERN', 'd': 'https://rebrickable.com/minifigs/{number}'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_LINK_PART_PATTERN', 'd': 'https://rebrickable.com/parts/{number}/_/{color}'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_LINK_MINIFIGURE_PATTERN', 'd': 'https://rebrickable.com/minifigs/{figure}'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_LINK_PART_PATTERN', 'd': 'https://rebrickable.com/parts/{part}/_/{color}'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_LINK_INSTRUCTIONS_PATTERN', 'd': 'https://rebrickable.com/instructions/{path}'}, # noqa: E501
|
||||
{'n': 'REBRICKABLE_USER_AGENT', 'd': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}, # noqa: E501
|
||||
{'n': 'USER_AGENT', 'd': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}, # noqa: E501
|
||||
{'n': 'PEERON_DOWNLOAD_DELAY', 'd': 1000, 'c': int},
|
||||
{'n': 'PEERON_INSTRUCTION_PATTERN', 'd': 'http://peeron.com/scans/{set_number}-{version_number}'},
|
||||
{'n': 'PEERON_MIN_IMAGE_SIZE', 'd': 100, 'c': int},
|
||||
{'n': 'PEERON_SCAN_PATTERN', 'd': 'http://belay.peeron.com/scans/{set_number}-{version_number}/'},
|
||||
{'n': 'PEERON_THUMBNAIL_PATTERN', 'd': 'http://belay.peeron.com/thumbs/{set_number}-{version_number}/'},
|
||||
{'n': 'REBRICKABLE_LINKS', 'e': 'LINKS', 'c': bool},
|
||||
{'n': 'REBRICKABLE_PAGE_SIZE', 'd': 100, 'c': int},
|
||||
{'n': 'RETIRED_SETS_FILE_URL', 'd': 'https://docs.google.com/spreadsheets/d/1rlYfEXtNKxUOZt2Mfv0H17DvK7bj6Pe0CuYwq6ay8WA/gviz/tq?tqx=out:csv&sheet=Sorted%20by%20Retirement%20Date'}, # noqa: E501
|
||||
{'n': 'RETIRED_SETS_PATH', 'd': './retired_sets.csv'},
|
||||
{'n': 'SETS_DEFAULT_ORDER', 'd': '"rebrickable_sets"."number" DESC, "rebrickable_sets"."version" ASC'}, # noqa: E501
|
||||
{'n': 'SETS_FOLDER', 'd': 'sets', 's': True},
|
||||
{'n': 'SETS_CONSOLIDATION', 'd': False, 'c': bool},
|
||||
{'n': 'SHOW_GRID_FILTERS', 'c': bool},
|
||||
{'n': 'SHOW_GRID_SORT', 'c': bool},
|
||||
{'n': 'SHOW_SETS_DUPLICATE_FILTER', 'd': True, 'c': bool},
|
||||
{'n': 'SKIP_SPARE_PARTS', 'c': bool},
|
||||
{'n': 'SOCKET_NAMESPACE', 'd': 'bricksocket'},
|
||||
{'n': 'SOCKET_PATH', 'd': '/bricksocket/'},
|
||||
{'n': 'STORAGE_DEFAULT_ORDER', 'd': '"bricktracker_metadata_storages"."name" ASC'}, # noqa: E501
|
||||
{'n': 'THEMES_FILE_URL', 'd': 'https://cdn.rebrickable.com/media/downloads/themes.csv.gz'}, # noqa: E501
|
||||
{'n': 'THEMES_PATH', 'd': './themes.csv'},
|
||||
{'n': 'TIMEZONE', 'd': 'Etc/UTC'},
|
||||
{'n': 'USE_REMOTE_IMAGES', 'c': bool},
|
||||
{'n': 'WISHES_DEFAULT_ORDER', 'd': '"bricktracker_wishes"."rowid" DESC'},
|
||||
{'n': 'STATISTICS_SHOW_CHARTS', 'd': True, 'c': bool},
|
||||
{'n': 'STATISTICS_DEFAULT_EXPANDED', 'd': True, 'c': bool},
|
||||
]
|
||||
|
||||
@@ -0,0 +1,314 @@
|
||||
import os
|
||||
import logging
|
||||
from typing import Any, Dict, Final, List, Optional
|
||||
from pathlib import Path
|
||||
from flask import current_app
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Environment variables that can be changed live without restart
|
||||
LIVE_CHANGEABLE_VARS: Final[List[str]] = [
|
||||
'BK_BRICKLINK_LINKS',
|
||||
'BK_DEFAULT_TABLE_PER_PAGE',
|
||||
'BK_DESCRIPTION_BADGE_MAX_LENGTH',
|
||||
'BK_INDEPENDENT_ACCORDIONS',
|
||||
'BK_HIDE_ADD_SET',
|
||||
'BK_HIDE_ADD_BULK_SET',
|
||||
'BK_HIDE_ADMIN',
|
||||
'BK_ADMIN_DEFAULT_EXPANDED_SECTIONS',
|
||||
'BK_HIDE_ALL_INSTRUCTIONS',
|
||||
'BK_HIDE_ALL_MINIFIGURES',
|
||||
'BK_HIDE_ALL_PARTS',
|
||||
'BK_HIDE_ALL_PROBLEMS_PARTS',
|
||||
'BK_HIDE_ALL_SETS',
|
||||
'BK_HIDE_ALL_STORAGES',
|
||||
'BK_HIDE_STATISTICS',
|
||||
'BK_HIDE_SET_INSTRUCTIONS',
|
||||
'BK_HIDE_TABLE_DAMAGED_PARTS',
|
||||
'BK_HIDE_TABLE_MISSING_PARTS',
|
||||
'BK_HIDE_TABLE_CHECKED_PARTS',
|
||||
'BK_HIDE_WISHES',
|
||||
'BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP',
|
||||
'BK_MINIFIGURES_PAGINATION_SIZE_MOBILE',
|
||||
'BK_MINIFIGURES_SERVER_SIDE_PAGINATION',
|
||||
'BK_PARTS_PAGINATION_SIZE_DESKTOP',
|
||||
'BK_PARTS_PAGINATION_SIZE_MOBILE',
|
||||
'BK_PARTS_SERVER_SIDE_PAGINATION',
|
||||
'BK_SETS_SERVER_SIDE_PAGINATION',
|
||||
'BK_PROBLEMS_PAGINATION_SIZE_DESKTOP',
|
||||
'BK_PROBLEMS_PAGINATION_SIZE_MOBILE',
|
||||
'BK_PROBLEMS_SERVER_SIDE_PAGINATION',
|
||||
'BK_SETS_PAGINATION_SIZE_DESKTOP',
|
||||
'BK_SETS_PAGINATION_SIZE_MOBILE',
|
||||
'BK_SETS_CONSOLIDATION',
|
||||
'BK_RANDOM',
|
||||
'BK_REBRICKABLE_LINKS',
|
||||
'BK_SHOW_GRID_FILTERS',
|
||||
'BK_SHOW_GRID_SORT',
|
||||
'BK_SHOW_SETS_DUPLICATE_FILTER',
|
||||
'BK_SKIP_SPARE_PARTS',
|
||||
'BK_USE_REMOTE_IMAGES',
|
||||
'BK_PEERON_DOWNLOAD_DELAY',
|
||||
'BK_PEERON_MIN_IMAGE_SIZE',
|
||||
'BK_REBRICKABLE_PAGE_SIZE',
|
||||
'BK_STATISTICS_SHOW_CHARTS',
|
||||
'BK_STATISTICS_DEFAULT_EXPANDED',
|
||||
# Default ordering and formatting
|
||||
'BK_INSTRUCTIONS_ALLOWED_EXTENSIONS',
|
||||
'BK_MINIFIGURES_DEFAULT_ORDER',
|
||||
'BK_PARTS_DEFAULT_ORDER',
|
||||
'BK_SETS_DEFAULT_ORDER',
|
||||
'BK_PURCHASE_LOCATION_DEFAULT_ORDER',
|
||||
'BK_STORAGE_DEFAULT_ORDER',
|
||||
'BK_WISHES_DEFAULT_ORDER',
|
||||
# URL and Pattern Variables
|
||||
'BK_BRICKLINK_LINK_PART_PATTERN',
|
||||
'BK_BRICKLINK_LINK_SET_PATTERN',
|
||||
'BK_REBRICKABLE_IMAGE_NIL',
|
||||
'BK_REBRICKABLE_IMAGE_NIL_MINIFIGURE',
|
||||
'BK_REBRICKABLE_LINK_MINIFIGURE_PATTERN',
|
||||
'BK_REBRICKABLE_LINK_PART_PATTERN',
|
||||
'BK_REBRICKABLE_LINK_INSTRUCTIONS_PATTERN',
|
||||
'BK_PEERON_INSTRUCTION_PATTERN',
|
||||
'BK_PEERON_SCAN_PATTERN',
|
||||
'BK_PEERON_THUMBNAIL_PATTERN',
|
||||
'BK_RETIRED_SETS_FILE_URL',
|
||||
'BK_RETIRED_SETS_PATH',
|
||||
'BK_THEMES_FILE_URL',
|
||||
'BK_THEMES_PATH'
|
||||
]
|
||||
|
||||
# Environment variables that require restart
|
||||
RESTART_REQUIRED_VARS: Final[List[str]] = [
|
||||
'BK_AUTHENTICATION_PASSWORD',
|
||||
'BK_AUTHENTICATION_KEY',
|
||||
'BK_DATABASE_PATH',
|
||||
'BK_DEBUG',
|
||||
'BK_DISABLE_INDIVIDUAL_MINIFIGURES',
|
||||
'BK_DOMAIN_NAME',
|
||||
'BK_HOST',
|
||||
'BK_PORT',
|
||||
'BK_SOCKET_NAMESPACE',
|
||||
'BK_SOCKET_PATH',
|
||||
'BK_NO_THREADED_SOCKET',
|
||||
'BK_TIMEZONE',
|
||||
'BK_REBRICKABLE_API_KEY',
|
||||
'BK_INSTRUCTIONS_FOLDER',
|
||||
'BK_PARTS_FOLDER',
|
||||
'BK_SETS_FOLDER',
|
||||
'BK_MINIFIGURES_FOLDER',
|
||||
'BK_DATABASE_TIMESTAMP_FORMAT',
|
||||
'BK_FILE_DATETIME_FORMAT',
|
||||
'BK_PURCHASE_DATE_FORMAT',
|
||||
'BK_PURCHASE_CURRENCY',
|
||||
'BK_REBRICKABLE_USER_AGENT',
|
||||
'BK_USER_AGENT'
|
||||
]
|
||||
|
||||
class ConfigManager:
|
||||
"""Manages live configuration updates for BrickTracker"""
|
||||
|
||||
def __init__(self):
|
||||
self.env_file_path = Path('.env')
|
||||
|
||||
def get_current_config(self) -> Dict[str, Any]:
|
||||
"""Get current configuration values for live-changeable variables"""
|
||||
config = {}
|
||||
for var in LIVE_CHANGEABLE_VARS:
|
||||
# Get internal config name
|
||||
internal_name = var.replace('BK_', '')
|
||||
# Get current value from Flask config
|
||||
if internal_name in current_app.config:
|
||||
config[var] = current_app.config[internal_name]
|
||||
else:
|
||||
# Fallback to environment variable
|
||||
config[var] = os.environ.get(var, '')
|
||||
return config
|
||||
|
||||
def get_restart_required_config(self) -> Dict[str, Any]:
|
||||
"""Get current configuration values for restart-required variables"""
|
||||
config = {}
|
||||
for var in RESTART_REQUIRED_VARS:
|
||||
# Get internal config name
|
||||
internal_name = var.replace('BK_', '')
|
||||
# Get current value from Flask config
|
||||
if internal_name in current_app.config:
|
||||
config[var] = current_app.config[internal_name]
|
||||
else:
|
||||
# Fallback to environment variable
|
||||
config[var] = os.environ.get(var, '')
|
||||
return config
|
||||
|
||||
def update_config(self, updates: Dict[str, Any]) -> Dict[str, str]:
|
||||
"""Update configuration values. Returns dict with status for each update"""
|
||||
results = {}
|
||||
for var_name, new_value in updates.items():
|
||||
if var_name not in LIVE_CHANGEABLE_VARS:
|
||||
results[var_name] = f"Error: {var_name} requires restart to change"
|
||||
continue
|
||||
|
||||
try:
|
||||
# Update environment variable
|
||||
os.environ[var_name] = str(new_value)
|
||||
# Update Flask config
|
||||
internal_name = var_name.replace('BK_', '')
|
||||
cast_value = self._cast_value(var_name, new_value)
|
||||
current_app.config[internal_name] = cast_value
|
||||
# Update .env file
|
||||
self._update_env_file(var_name, new_value)
|
||||
results[var_name] = "Updated successfully"
|
||||
if current_app.debug:
|
||||
logger.info(f"Config updated: {var_name}={new_value}")
|
||||
except Exception as e:
|
||||
results[var_name] = f"Error: {str(e)}"
|
||||
logger.error(f"Failed to update {var_name}: {e}")
|
||||
return results
|
||||
|
||||
def _cast_value(self, var_name: str, value: Any) -> Any:
|
||||
"""Cast value to appropriate type based on variable name"""
|
||||
# List variables (admin sections) - Check this FIRST before boolean check
|
||||
if 'sections' in var_name.lower():
|
||||
if isinstance(value, str):
|
||||
return [section.strip() for section in value.split(',') if section.strip()]
|
||||
elif isinstance(value, list):
|
||||
return value
|
||||
else:
|
||||
return []
|
||||
# Integer variables (pagination sizes, delays, etc.) - Check BEFORE boolean check
|
||||
if any(keyword in var_name.lower() for keyword in ['_size', '_page', 'delay', 'min_', 'per_page', 'page_size', '_length']):
|
||||
try:
|
||||
return int(value)
|
||||
except (ValueError, TypeError):
|
||||
return 0
|
||||
# Boolean variables - More specific patterns to avoid conflicts
|
||||
if any(keyword in var_name.lower() for keyword in ['hide_', 'server_side_pagination', '_links', 'random', 'skip_', 'show_', 'use_', '_consolidation', '_charts', '_expanded']):
|
||||
if isinstance(value, str):
|
||||
return value.lower() in ('true', '1', 'yes', 'on')
|
||||
return bool(value)
|
||||
# String variables (default)
|
||||
return str(value)
|
||||
|
||||
def _format_env_value(self, value: Any) -> str:
|
||||
"""Format value for .env file storage"""
|
||||
if isinstance(value, bool):
|
||||
return 'true' if value else 'false'
|
||||
elif isinstance(value, (int, float)):
|
||||
return str(value)
|
||||
elif isinstance(value, list):
|
||||
return ','.join(str(item) for item in value)
|
||||
elif value is None:
|
||||
return ''
|
||||
else:
|
||||
return str(value)
|
||||
|
||||
def _update_env_file(self, var_name: str, value: Any) -> None:
|
||||
"""Update the .env file with new value"""
|
||||
if not self.env_file_path.exists():
|
||||
self.env_file_path.touch()
|
||||
|
||||
# Read current .env content
|
||||
lines = []
|
||||
if self.env_file_path.exists():
|
||||
with open(self.env_file_path, 'r', encoding='utf-8') as f:
|
||||
lines = f.readlines()
|
||||
|
||||
# Format value for .env file
|
||||
env_value = self._format_env_value(value)
|
||||
|
||||
# Find and update the line, or add new line
|
||||
updated = False
|
||||
|
||||
# First pass: Look for existing active variable
|
||||
for i, line in enumerate(lines):
|
||||
if line.strip().startswith(f"{var_name}="):
|
||||
lines[i] = f"{var_name}={env_value}\n"
|
||||
updated = True
|
||||
break
|
||||
|
||||
# Second pass: If not found, look for commented-out variable
|
||||
if not updated:
|
||||
for i, line in enumerate(lines):
|
||||
stripped = line.strip()
|
||||
# Check for commented-out variable: # BK_VAR= or #BK_VAR=
|
||||
if stripped.startswith('#') and var_name in stripped:
|
||||
# Extract the part after #, handling optional space
|
||||
comment_content = stripped[1:].strip()
|
||||
if comment_content.startswith(f"{var_name}=") or comment_content.startswith(f"{var_name} ="):
|
||||
# Uncomment and set new value, preserving any leading whitespace from original line
|
||||
leading_whitespace = line[:len(line) - len(line.lstrip())]
|
||||
lines[i] = f"{leading_whitespace}{var_name}={env_value}\n"
|
||||
updated = True
|
||||
logger.info(f"Uncommented and updated {var_name} in .env file")
|
||||
break
|
||||
|
||||
# Third pass: If still not found, append to end
|
||||
if not updated:
|
||||
lines.append(f"{var_name}={env_value}\n")
|
||||
logger.info(f"Added new {var_name} to end of .env file")
|
||||
|
||||
# Write back to file
|
||||
with open(self.env_file_path, 'w', encoding='utf-8') as f:
|
||||
f.writelines(lines)
|
||||
|
||||
def validate_config(self) -> Dict[str, Any]:
|
||||
"""Validate current configuration"""
|
||||
issues = []
|
||||
warnings = []
|
||||
|
||||
# Check if critical variables are set
|
||||
if not os.environ.get('BK_REBRICKABLE_API_KEY'):
|
||||
warnings.append("BK_REBRICKABLE_API_KEY not set - some features may not work")
|
||||
|
||||
# Check for conflicting settings
|
||||
if (os.environ.get('BK_PARTS_SERVER_SIDE_PAGINATION', '').lower() == 'false' and
|
||||
int(os.environ.get('BK_PARTS_PAGINATION_SIZE_DESKTOP', '10')) > 100):
|
||||
warnings.append("Large pagination size with client-side pagination may cause performance issues")
|
||||
|
||||
# Check pagination sizes are reasonable
|
||||
for var in ['BK_SETS_PAGINATION_SIZE_DESKTOP', 'BK_PARTS_PAGINATION_SIZE_DESKTOP', 'BK_MINIFIGURES_PAGINATION_SIZE_DESKTOP']:
|
||||
try:
|
||||
size = int(os.environ.get(var, '10'))
|
||||
if size < 1:
|
||||
issues.append(f"{var} must be at least 1")
|
||||
elif size > 1000:
|
||||
warnings.append(f"{var} is very large ({size}) - may cause performance issues")
|
||||
except ValueError:
|
||||
issues.append(f"{var} must be a valid integer")
|
||||
|
||||
return {
|
||||
'issues': issues,
|
||||
'warnings': warnings,
|
||||
'status': 'valid' if not issues else 'has_issues'
|
||||
}
|
||||
|
||||
def get_variable_help(self, var_name: str) -> str:
|
||||
"""Get help text for a configuration variable"""
|
||||
help_text = {
|
||||
'BK_BRICKLINK_LINKS': 'Show BrickLink links throughout the application',
|
||||
'BK_DEFAULT_TABLE_PER_PAGE': 'Default number of items per page in tables',
|
||||
'BK_INDEPENDENT_ACCORDIONS': 'Make accordion sections independent (can open multiple)',
|
||||
'BK_HIDE_ADD_SET': 'Hide the "Add Set" menu entry',
|
||||
'BK_HIDE_ADD_BULK_SET': 'Hide the "Add Bulk Set" menu entry',
|
||||
'BK_HIDE_ADMIN': 'Hide the "Admin" menu entry',
|
||||
'BK_ADMIN_DEFAULT_EXPANDED_SECTIONS': 'Admin sections to expand by default (comma-separated)',
|
||||
'BK_HIDE_ALL_INSTRUCTIONS': 'Hide the "Instructions" menu entry',
|
||||
'BK_HIDE_ALL_MINIFIGURES': 'Hide the "Minifigures" menu entry',
|
||||
'BK_HIDE_ALL_PARTS': 'Hide the "Parts" menu entry',
|
||||
'BK_HIDE_ALL_PROBLEMS_PARTS': 'Hide the "Problems" menu entry',
|
||||
'BK_HIDE_ALL_SETS': 'Hide the "Sets" menu entry',
|
||||
'BK_HIDE_ALL_STORAGES': 'Hide the "Storages" menu entry',
|
||||
'BK_HIDE_STATISTICS': 'Hide the "Statistics" menu entry',
|
||||
'BK_HIDE_SET_INSTRUCTIONS': 'Hide instructions section in set details',
|
||||
'BK_HIDE_TABLE_DAMAGED_PARTS': 'Hide the "Damaged" column in parts tables',
|
||||
'BK_HIDE_TABLE_MISSING_PARTS': 'Hide the "Missing" column in parts tables',
|
||||
'BK_HIDE_TABLE_CHECKED_PARTS': 'Hide the "Checked" column in parts tables',
|
||||
'BK_HIDE_WISHES': 'Hide the "Wishes" menu entry',
|
||||
'BK_SETS_CONSOLIDATION': 'Enable set consolidation/grouping functionality',
|
||||
'BK_SHOW_GRID_FILTERS': 'Show filter options on grids by default',
|
||||
'BK_SHOW_GRID_SORT': 'Show sort options on grids by default',
|
||||
'BK_SKIP_SPARE_PARTS': 'Skip spare parts when importing sets',
|
||||
'BK_USE_REMOTE_IMAGES': 'Use remote images from Rebrickable CDN instead of local',
|
||||
'BK_STATISTICS_SHOW_CHARTS': 'Show collection growth charts on statistics page',
|
||||
'BK_STATISTICS_DEFAULT_EXPANDED': 'Expand all statistics sections by default'
|
||||
}
|
||||
return help_text.get(var_name, 'No help available for this variable')
|
||||
@@ -0,0 +1,492 @@
|
||||
import logging
|
||||
import traceback
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import current_app, url_for
|
||||
|
||||
from .exceptions import NotFoundException, DatabaseException, ErrorException
|
||||
from .parser import parse_minifig
|
||||
from .rebrickable import Rebrickable
|
||||
from .rebrickable_minifigure import RebrickableMinifigure
|
||||
from .set_owner_list import BrickSetOwnerList
|
||||
from .set_purchase_location_list import BrickSetPurchaseLocationList
|
||||
from .set_storage_list import BrickSetStorageList
|
||||
from .set_tag_list import BrickSetTagList
|
||||
from .sql import BrickSQL
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Individual minifigure (not associated with a set)
|
||||
class IndividualMinifigure(RebrickableMinifigure):
|
||||
# Queries
|
||||
select_query: str = 'individual_minifigure/select/by_id'
|
||||
light_query: str = 'individual_minifigure/select/light'
|
||||
insert_query: str = 'individual_minifigure/insert'
|
||||
|
||||
# Delete a individual minifigure
|
||||
def delete(self, /) -> None:
|
||||
BrickSQL().executescript(
|
||||
'individual_minifigure/delete/individual_minifigure',
|
||||
id=self.fields.id
|
||||
)
|
||||
|
||||
# Import a individual minifigure into the database
|
||||
def download(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
|
||||
# Load the minifigure
|
||||
if not self.load(socket, data, from_download=True):
|
||||
return False
|
||||
|
||||
try:
|
||||
# Insert into the database
|
||||
socket.auto_progress(
|
||||
message='Minifigure {figure}: inserting into database'.format(
|
||||
figure=self.fields.figure
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
# Generate an UUID for self
|
||||
self.fields.id = str(uuid4())
|
||||
|
||||
# Save the storage
|
||||
storage = BrickSetStorageList.get(
|
||||
data.get('storage', ''),
|
||||
allow_none=True
|
||||
)
|
||||
self.fields.storage = storage.fields.id if storage else None
|
||||
|
||||
# Save the purchase location
|
||||
purchase_location = BrickSetPurchaseLocationList.get(
|
||||
data.get('purchase_location', ''),
|
||||
allow_none=True
|
||||
)
|
||||
self.fields.purchase_location = purchase_location.fields.id if purchase_location else None
|
||||
|
||||
# Save quantity and description
|
||||
self.fields.quantity = int(data.get('quantity', 1))
|
||||
self.fields.description = data.get('description', '')
|
||||
|
||||
# IMPORTANT: Insert rebrickable minifigure FIRST
|
||||
# bricktracker_individual_minifigures has FK to rebrickable_minifigures
|
||||
self.insert_rebrickable_loose()
|
||||
|
||||
# Now insert into bricktracker_individual_minifigures
|
||||
# Use no_defer=True to ensure the insert happens before we insert parts
|
||||
# (parts have a foreign key constraint on this id)
|
||||
self.insert(commit=False, no_defer=True)
|
||||
|
||||
# Save the owners
|
||||
owners: list[str] = list(data.get('owners', []))
|
||||
for id in owners:
|
||||
owner = BrickSetOwnerList.get(id)
|
||||
owner.update_individual_minifigure_state(self, state=True)
|
||||
|
||||
# Save the tags
|
||||
tags: list[str] = list(data.get('tags', []))
|
||||
for id in tags:
|
||||
tag = BrickSetTagList.get(id)
|
||||
tag.update_individual_minifigure_state(self, state=True)
|
||||
|
||||
# Load the parts (elements) for this minifigure
|
||||
if not self.download_parts(socket):
|
||||
return False
|
||||
|
||||
# Commit the transaction to the database
|
||||
socket.auto_progress(
|
||||
message='Minifigure {figure}: writing to the database'.format(
|
||||
figure=self.fields.figure
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
BrickSQL().commit()
|
||||
|
||||
# Info
|
||||
logger.info('Minifigure {figure}: imported (id: {id})'.format(
|
||||
figure=self.fields.figure,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
# Complete
|
||||
socket.complete(
|
||||
message='Minifigure {figure}: imported (<a href="{url}">Go to the minifigure</a>)'.format(
|
||||
figure=self.fields.figure,
|
||||
url=self.url()
|
||||
),
|
||||
download=True
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
socket.fail(
|
||||
message='Error while importing minifigure {figure}: {error}'.format(
|
||||
figure=self.fields.figure,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# Download parts (elements) for this individual minifigure
|
||||
def download_parts(self, socket: 'BrickSocket', /) -> bool:
|
||||
"""Download minifigure parts using get_minifig_elements()"""
|
||||
try:
|
||||
# Check if we have cached parts data from load()
|
||||
if hasattr(self, '_cached_parts_response'):
|
||||
response = self._cached_parts_response
|
||||
logger.debug('Using cached parts data from load()')
|
||||
else:
|
||||
# Need to fetch parts data
|
||||
socket.auto_progress(
|
||||
message='Minifigure {figure}: loading parts from Rebrickable'.format(
|
||||
figure=self.fields.figure
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
logger.debug('rebrick.lego.get_minifig_elements("{figure}")'.format(
|
||||
figure=self.fields.figure,
|
||||
))
|
||||
|
||||
# Load parts data from Rebrickable API
|
||||
import json
|
||||
from rebrick import lego
|
||||
|
||||
parameters = {
|
||||
'api_key': current_app.config['REBRICKABLE_API_KEY'],
|
||||
'page_size': current_app.config['REBRICKABLE_PAGE_SIZE'],
|
||||
}
|
||||
|
||||
response = json.loads(lego.get_minifig_elements(
|
||||
self.fields.figure,
|
||||
**parameters
|
||||
).read())
|
||||
|
||||
socket.auto_progress(
|
||||
message='Minifigure {figure}: saving parts to database'.format(
|
||||
figure=self.fields.figure
|
||||
),
|
||||
)
|
||||
|
||||
# Insert each part into individual_minifigure_parts table
|
||||
from .rebrickable_part import RebrickablePart
|
||||
|
||||
if 'results' in response:
|
||||
logger.debug(f'Processing {len(response["results"])} parts for minifigure {self.fields.figure}')
|
||||
|
||||
for idx, result in enumerate(response['results']):
|
||||
part_num = result['part']['part_num']
|
||||
color_id = result['color']['id']
|
||||
|
||||
logger.debug(
|
||||
f'Part {idx+1}/{len(response["results"])}: {part_num} '
|
||||
f'(color: {color_id}, quantity: {result["quantity"]})'
|
||||
)
|
||||
|
||||
# Insert rebrickable part data first
|
||||
part_data = RebrickablePart.from_rebrickable(result)
|
||||
logger.debug(f'Rebrickable part data keys: {list(part_data.keys())}')
|
||||
|
||||
# Insert into rebrickable_parts if not exists
|
||||
BrickSQL().execute(
|
||||
'rebrickable/part/insert',
|
||||
parameters=part_data,
|
||||
commit=False,
|
||||
)
|
||||
|
||||
# Download part image if not using remote images
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
# Create a RebrickablePart instance for image download
|
||||
from .set import BrickSet
|
||||
try:
|
||||
part_instance = RebrickablePart(record=part_data)
|
||||
from .rebrickable_image import RebrickableImage
|
||||
RebrickableImage(
|
||||
BrickSet(), # Dummy set
|
||||
minifigure=self,
|
||||
part=part_instance,
|
||||
).download()
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f'Could not download image for part {part_num}: {e}'
|
||||
)
|
||||
|
||||
# Insert into bricktracker_individual_minifigure_parts
|
||||
individual_part_params = {
|
||||
'id': self.fields.id,
|
||||
'part': part_num,
|
||||
'color': color_id,
|
||||
'spare': result.get('is_spare', False),
|
||||
'quantity': result['quantity'],
|
||||
'element': result.get('element_id'),
|
||||
'rebrickable_inventory': result['id'],
|
||||
}
|
||||
logger.debug(f'Individual part params: {individual_part_params}')
|
||||
|
||||
BrickSQL().execute(
|
||||
'individual_minifigure/part/insert',
|
||||
parameters=individual_part_params,
|
||||
commit=False,
|
||||
)
|
||||
|
||||
logger.debug(f'Successfully inserted all {len(response["results"])} parts')
|
||||
else:
|
||||
logger.warning(f'No results in parts response for minifigure {self.fields.figure}')
|
||||
|
||||
# Clean up cached data
|
||||
if hasattr(self, '_cached_parts_response'):
|
||||
delattr(self, '_cached_parts_response')
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
socket.fail(
|
||||
message='Error loading parts for minifigure {figure}: {error}'.format(
|
||||
figure=self.fields.figure,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
logger.debug(traceback.format_exc())
|
||||
return False
|
||||
|
||||
# Insert the individual minifigure from Rebrickable
def insert_rebrickable_loose(self, /) -> None:
    """Persist the Rebrickable minifigure record without any set association.

    Writes the rebrickable_minifigures row (left uncommitted for the
    caller) and, when local images are enabled, tries to download the
    minifigure image, logging a warning on failure instead of raising.
    """
    from .rebrickable_image import RebrickableImage

    # Collect the columns expected by the rebrickable_minifigures insert
    insert_parameters = {
        name: getattr(self.fields, name)
        for name in ('figure', 'number', 'name', 'image', 'number_of_parts')
    }

    # Caller is responsible for committing the transaction
    BrickSQL().execute(
        RebrickableMinifigure.insert_query,
        parameters=insert_parameters,
        commit=False,
    )

    # Remote images need no local copy: nothing left to do
    if current_app.config['USE_REMOTE_IMAGES']:
        return

    # RebrickableImage checks the minifigure before the set, so a freshly
    # constructed (empty) BrickSet is a safe placeholder here
    from .set import BrickSet

    try:
        RebrickableImage(
            BrickSet(),
            minifigure=self,
        ).download()
        logger.debug(f'Downloaded image for individual minifigure {self.fields.figure}')
    except Exception as e:
        logger.warning(
            f'Could not download image for individual minifigure {self.fields.figure}: {e}'
        )
|
||||
|
||||
# Load the minifigure from Rebrickable
def load(
    self,
    socket: 'BrickSocket',
    data: dict[str, Any],
    /,
    *,
    from_download: bool = False,
) -> bool:
    """Load an individual minifigure's data from the Rebrickable API.

    Fetches the part inventory for the figure reference in data['figure']
    and, best-effort, the minifigure's name and assembled image, storing
    everything on self.fields. Progress and errors are reported through
    the socket.

    Returns True on success, False on failure (after notifying the socket).
    """
    # Reset the progress
    socket.progress_count = 0
    socket.progress_total = 2

    try:
        # Check if individual minifigures are disabled
        from flask import current_app
        if current_app.config.get('DISABLE_INDIVIDUAL_MINIFIGURES', False):
            raise ErrorException(
                'Individual minifigures system is disabled. '
                'Only set-based minifigures can be added.'
            )

        socket.auto_progress(message='Parsing minifigure number')
        figure = parse_minifig(str(data['figure']))

        def extract_number() -> int:
            # Figure references look like 'fig-005997': the minifigure
            # number is the numeric suffix. Fall back to 0 on any
            # unexpected shape. (Was duplicated with bare `except:` in
            # two branches; narrowed to the exceptions int/split raise.)
            try:
                return int(figure.split('-')[1])
            except (IndexError, ValueError):
                return 0

        socket.auto_progress(
            message='Minifigure {figure}: loading from Rebrickable'.format(
                figure=figure,
            ),
        )

        logger.debug('rebrick.lego.get_minifig_elements("{figure}")'.format(
            figure=figure,
        ))

        # Load from Rebrickable using get_minifig_elements:
        # this gives us both minifigure info and parts in one call
        import json
        from rebrick import lego

        parameters = {
            'api_key': current_app.config['REBRICKABLE_API_KEY'],
            'page_size': current_app.config['REBRICKABLE_PAGE_SIZE'],
        }

        response = json.loads(lego.get_minifig_elements(
            figure,
            **parameters
        ).read())

        # Extract minifigure info from the first part's metadata
        if 'results' in response and len(response['results']) > 0:
            first_part = response['results'][0]

            # Build minifigure data from the response
            self.fields.figure = first_part['set_num']
            self.fields.number_of_parts = response['count']
            self.fields.number = extract_number()

            # We still need get_minifig() for the proper name and the
            # assembled-minifig image; this is a small additional call
            try:
                # get_minifig() only needs api_key, not page_size
                minifig_params = {
                    'api_key': current_app.config['REBRICKABLE_API_KEY']
                }
                minifig_response = json.loads(lego.get_minifig(
                    figure,
                    **minifig_params
                ).read())
                self.fields.name = minifig_response.get('name', f"Minifigure {figure}")

                # Use the minifig image from get_minifig() - this is the
                # assembled minifig rather than a single part
                self.fields.image = minifig_response.get('set_img_url')

            except Exception as e:
                logger.warning(f'Could not fetch minifigure name: {e}')
                self.fields.name = f"Minifigure {figure}"

                # Fallback: use the image of the first part that carries
                # an element_id, if any
                self.fields.image = None
                for result in response['results']:
                    if result.get('element_id') and result['part'].get('part_img_url'):
                        self.fields.image = result['part']['part_img_url']
                        break

            # Store the parts data for later use in download
            self._cached_parts_response = response
        else:
            raise NotFoundException(f'Minifigure {figure} has no parts in Rebrickable')

        socket.emit('MINIFIGURE_LOADED', self.short(
            from_download=from_download
        ))

        if not from_download:
            socket.complete(
                message='Minifigure {figure}: loaded from Rebrickable'.format(
                    figure=self.fields.figure
                )
            )

        return True

    except Exception as e:
        # The "disabled" error gets a cleaner, shorter message
        error_msg = str(e)
        if 'Individual minifigures system is disabled' in error_msg:
            socket.fail(message=error_msg)
        else:
            socket.fail(
                message='Could not load the minifigure from Rebrickable: {error}. Data: {data}'.format(
                    error=error_msg,
                    data=data,
                )
            )

        # Expected domain errors are not worth a stack trace
        if not isinstance(e, (NotFoundException, ErrorException)):
            logger.debug(traceback.format_exc())

        return False
|
||||
|
||||
# Return a short form of the minifigure
def short(self, /, *, from_download: bool = False) -> dict[str, Any]:
    """Summarize the minifigure for socket payloads."""
    summary: dict[str, Any] = {
        'download': from_download,
        'image': self.url_for_image(),
        'name': self.fields.name,
        'figure': self.fields.figure,
    }

    return summary
|
||||
|
||||
# Select a individual minifigure by ID
def select_by_id(self, id: str, /) -> Self:
    """Load this individual minifigure from the database by its unique ID.

    Raises NotFoundException when no matching row exists.
    """
    # Import status list here to get metadata columns
    from .set_status_list import BrickSetStatusList

    # Save the ID parameter
    self.fields.id = id

    # Each metadata family only contributes its columns (against the
    # individual-minifigure tables) when at least one entry is
    # configured; an empty string keeps the query template valid
    context: dict[str, str] = {}

    if BrickSetOwnerList.list():
        context['owners'] = ', ' + BrickSetOwnerList.as_columns(table='bricktracker_individual_minifigure_owners')
    else:
        context['owners'] = ''

    if BrickSetStatusList.list(all=True):
        context['statuses'] = ', ' + BrickSetStatusList.as_columns(table='bricktracker_individual_minifigure_statuses', all=True)
    else:
        context['statuses'] = ''

    if BrickSetTagList.list():
        context['tags'] = ', ' + BrickSetTagList.as_columns(table='bricktracker_individual_minifigure_tags')
    else:
        context['tags'] = ''

    if not self.select(**context):
        raise NotFoundException(
            'Individual minifigure with ID {id} was not found in the database'.format(
                id=id,
            ),
        )

    return self
|
||||
|
||||
# URL to this individual minifigure instance
def url(self, /) -> str:
    """URL of this individual minifigure's details page."""
    minifigure_id = self.fields.id
    return url_for('individual_minifigure.details', id=minifigure_id)
|
||||
|
||||
# URL for updating quantity
def url_for_quantity(self, /) -> str:
    """URL of the endpoint that updates this minifigure's quantity."""
    minifigure_id = self.fields.id
    return url_for('individual_minifigure.update_quantity', id=minifigure_id)
|
||||
|
||||
# URL for updating description
def url_for_description(self, /) -> str:
    """URL of the endpoint that updates this minifigure's description."""
    minifigure_id = self.fields.id
    return url_for('individual_minifigure.update_description', id=minifigure_id)
|
||||
|
||||
# Parts
def generic_parts(self, /):
    """List the parts belonging to this individual minifigure."""
    from .part_list import BrickPartList

    part_list = BrickPartList()
    return part_list.from_individual_minifigure(self)
|
||||
|
||||
# Override from_rebrickable to handle minifigure data
|
||||
@staticmethod
|
||||
def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]:
|
||||
# Extracting number
|
||||
number = int(str(data['set_num'])[5:])
|
||||
|
||||
return {
|
||||
'figure': str(data['set_num']),
|
||||
'number': int(number),
|
||||
'name': str(data['set_name']),
|
||||
'image': data.get('set_img_url'),
|
||||
'number_of_parts': int(data.get('num_parts', 0)),
|
||||
}
|
||||
@@ -0,0 +1,77 @@
|
||||
import logging
|
||||
from typing import Self
|
||||
|
||||
from .individual_minifigure import IndividualMinifigure
|
||||
from .record_list import BrickRecordList
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Individual minifigures list
class IndividualMinifigureList(BrickRecordList[IndividualMinifigure]):
    """Collection of IndividualMinifigure records loaded from the database."""

    # Queries
    instances_by_figure_query: str = 'individual_minifigure/select/instances_by_figure'
    using_storage_query: str = 'individual_minifigure/list/using_storage'
    without_storage_query: str = 'individual_minifigure/list/without_storage'

    def __init__(self, /):
        super().__init__()

    # Load all individual instances of a specific minifigure figure
    def instances_by_figure(self, figure: str, /) -> Self:
        """Collect every individual instance of the given figure reference."""
        # Import metadata lists here to get their columns
        from .set_owner_list import BrickSetOwnerList
        from .set_status_list import BrickSetStatusList
        from .set_tag_list import BrickSetTagList

        # Save the figure parameter
        self.fields.figure = figure

        # When a metadata family has no configured entries, a NULL
        # placeholder column keeps the query template well-formed
        if BrickSetOwnerList.list():
            owners = BrickSetOwnerList.as_columns(table='bricktracker_individual_minifigure_owners')
        else:
            owners = 'NULL AS "no_owners"'

        if BrickSetStatusList.list(all=True):
            statuses = BrickSetStatusList.as_columns(table='bricktracker_individual_minifigure_statuses', all=True)
        else:
            statuses = 'NULL AS "no_statuses"'

        if BrickSetTagList.list():
            tags = BrickSetTagList.as_columns(table='bricktracker_individual_minifigure_tags')
        else:
            tags = 'NULL AS "no_tags"'

        # Load the instances from the database
        self.list(
            override_query=self.instances_by_figure_query,
            owners=owners,
            statuses=statuses,
            tags=tags,
        )

        return self

    # Load all individual minifigures using a specific storage
    def using_storage(self, storage: 'BrickSetStorage', /) -> Self:
        """Collect the individual minifigures kept in the given storage."""
        # Save the storage parameter
        self.fields.storage = storage.fields.id

        self.list(override_query=self.using_storage_query)

        return self

    # Load all individual minifigures without storage
    def without_storage(self, /) -> Self:
        """Collect the individual minifigures with no storage assigned."""
        self.list(override_query=self.without_storage_query)

        return self

    # Base individual minifigure list
    def list(
        self,
        /,
        *,
        override_query: str | None = None,
        order: str | None = None,
        limit: int | None = None,
        **context,
    ) -> None:
        """Populate self.records from the database, one instance per row."""
        rows = super().select(
            override_query=override_query,
            order=order,
            limit=limit,
            **context
        )

        self.records.extend(
            IndividualMinifigure(record=row) for row in rows
        )
|
||||
@@ -1,6 +1,7 @@
|
||||
from datetime import datetime, timezone
|
||||
import logging
|
||||
import os
|
||||
from urllib.parse import urljoin
|
||||
from shutil import copyfileobj
|
||||
import traceback
|
||||
from typing import Tuple, TYPE_CHECKING
|
||||
@@ -11,6 +12,8 @@ import humanize
|
||||
import requests
|
||||
from werkzeug.datastructures import FileStorage
|
||||
from werkzeug.utils import secure_filename
|
||||
import re
|
||||
import cloudscraper
|
||||
|
||||
from .exceptions import ErrorException, DownloadException
|
||||
if TYPE_CHECKING:
|
||||
@@ -89,91 +92,74 @@ class BrickInstructions(object):
|
||||
|
||||
# Download an instruction file
|
||||
def download(self, path: str, /) -> None:
|
||||
"""
|
||||
Streams the PDF in chunks and uses self.socket.update_total
|
||||
+ self.socket.progress_count to drive a determinate bar.
|
||||
"""
|
||||
try:
|
||||
# Just to make sure that the progress is initiated
|
||||
self.socket.progress(
|
||||
message='Downloading {file}'.format(
|
||||
file=self.filename,
|
||||
)
|
||||
)
|
||||
|
||||
target = self.path(filename=secure_filename(self.filename))
|
||||
|
||||
# Skipping rather than failing here
|
||||
# Skip if we already have it
|
||||
if os.path.isfile(target):
|
||||
self.socket.complete(
|
||||
message='File {file} already exists, skipped'.format(
|
||||
file=self.filename,
|
||||
)
|
||||
pdf_url = self.url()
|
||||
return self.socket.complete(
|
||||
message=f'File {self.filename} already exists, skipped - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
|
||||
)
|
||||
|
||||
else:
|
||||
url = current_app.config['REBRICKABLE_LINK_INSTRUCTIONS_PATTERN'].format( # noqa: E501
|
||||
path=path
|
||||
)
|
||||
trimmed_url = current_app.config['REBRICKABLE_LINK_INSTRUCTIONS_PATTERN'].format( # noqa: E501
|
||||
path=path.partition('/')[0]
|
||||
)
|
||||
# Fetch PDF via cloudscraper (to bypass Cloudflare)
|
||||
scraper = cloudscraper.create_scraper()
|
||||
scraper.headers.update({
|
||||
"User-Agent": current_app.config['REBRICKABLE_USER_AGENT']
|
||||
})
|
||||
resp = scraper.get(path, stream=True)
|
||||
if not resp.ok:
|
||||
raise DownloadException(f"Failed to download: HTTP {resp.status_code}")
|
||||
|
||||
# Request the file
|
||||
self.socket.progress(
|
||||
message='Requesting {url}'.format(
|
||||
url=trimmed_url,
|
||||
)
|
||||
)
|
||||
# Tell the socket how many bytes in total
|
||||
total = int(resp.headers.get("Content-Length", 0))
|
||||
self.socket.update_total(total)
|
||||
|
||||
response = requests.get(url, stream=True)
|
||||
if response.ok:
|
||||
# Reset the counter and kick off at 0%
|
||||
self.socket.progress_count = 0
|
||||
self.socket.progress(message=f"Starting download {self.filename}")
|
||||
|
||||
# Store the content header as size
|
||||
try:
|
||||
self.size = int(
|
||||
response.headers.get('Content-length', 0)
|
||||
)
|
||||
except Exception:
|
||||
self.size = 0
|
||||
# Write out in 8 KiB chunks and update the counter
|
||||
with open(target, "wb") as f:
|
||||
for chunk in resp.iter_content(chunk_size=8192):
|
||||
if not chunk:
|
||||
continue
|
||||
f.write(chunk)
|
||||
|
||||
# Downloading the file
|
||||
# Bump the internal counter and emit
|
||||
self.socket.progress_count += len(chunk)
|
||||
self.socket.progress(
|
||||
message='Downloading {url} ({size})'.format(
|
||||
url=trimmed_url,
|
||||
size=self.human_size(),
|
||||
message=(
|
||||
f"Downloading {self.filename} "
|
||||
f"({humanize.naturalsize(self.socket.progress_count)}/"
|
||||
f"{humanize.naturalsize(self.socket.progress_total)})"
|
||||
)
|
||||
)
|
||||
|
||||
with open(target, 'wb') as f:
|
||||
copyfileobj(response.raw, f)
|
||||
else:
|
||||
raise DownloadException('failed to download: {code}'.format( # noqa: E501
|
||||
code=response.status_code
|
||||
))
|
||||
|
||||
# Info
|
||||
logger.info('The instruction file {file} has been downloaded'.format( # noqa: E501
|
||||
file=self.filename
|
||||
))
|
||||
|
||||
# Complete
|
||||
self.socket.complete(
|
||||
message='File {file} downloaded ({size})'.format( # noqa: E501
|
||||
file=self.filename,
|
||||
size=self.human_size()
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.socket.fail(
|
||||
message='Error while downloading instruction {file}: {error}'.format( # noqa: E501
|
||||
file=self.filename,
|
||||
error=e,
|
||||
)
|
||||
# Done!
|
||||
logger.info(f"Downloaded {self.filename}")
|
||||
pdf_url = self.url()
|
||||
self.socket.complete(
|
||||
message=f'File {self.filename} downloaded ({self.human_size()}) - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.debug(traceback.format_exc())
|
||||
self.socket.fail(
|
||||
message=f"Error downloading {self.filename}: {e}"
|
||||
)
|
||||
|
||||
# Display the size in a human format
|
||||
def human_size(self) -> str:
|
||||
return humanize.naturalsize(self.size)
|
||||
try:
|
||||
size = self.size
|
||||
except AttributeError:
|
||||
size = os.path.getsize(self.path())
|
||||
return humanize.naturalsize(size)
|
||||
|
||||
# Display the time in a human format
|
||||
def human_time(self) -> str:
|
||||
@@ -250,40 +236,52 @@ class BrickInstructions(object):
|
||||
# Find the instructions for a set
|
||||
@staticmethod
|
||||
def find_instructions(set: str, /) -> list[Tuple[str, str]]:
|
||||
response = requests.get(
|
||||
current_app.config['REBRICKABLE_LINK_INSTRUCTIONS_PATTERN'].format(
|
||||
path=set,
|
||||
),
|
||||
headers={
|
||||
'User-Agent': current_app.config['REBRICKABLE_USER_AGENT']
|
||||
}
|
||||
)
|
||||
"""
|
||||
Scrape Rebrickable’s HTML and return a list of
|
||||
(filename_slug, download_url). Duplicate slugs get _1, _2, …
|
||||
"""
|
||||
page_url = f"https://rebrickable.com/instructions/{set}/"
|
||||
logger.debug(f"[find_instructions] fetching HTML from {page_url!r}")
|
||||
|
||||
if not response.ok:
|
||||
raise ErrorException('Failed to load the Rebrickable instructions page. Status code: {code}'.format( # noqa: E501
|
||||
code=response.status_code
|
||||
))
|
||||
# Solve Cloudflare’s challenge
|
||||
scraper = cloudscraper.create_scraper()
|
||||
scraper.headers.update({'User-Agent': current_app.config['REBRICKABLE_USER_AGENT']})
|
||||
resp = scraper.get(page_url)
|
||||
if not resp.ok:
|
||||
raise ErrorException(f'Failed to load instructions page for {set}. HTTP {resp.status_code}')
|
||||
|
||||
# Parse the HTML content
|
||||
soup = BeautifulSoup(response.content, 'html.parser')
|
||||
soup = BeautifulSoup(resp.content, 'html.parser')
|
||||
link_re = re.compile(r'^/instructions/\d+/.+/download/')
|
||||
|
||||
# Collect all <img> tags with "LEGO Building Instructions" in the
|
||||
# alt attribute
|
||||
found_tags: list[Tuple[str, str]] = []
|
||||
for a_tag in soup.find_all('a', href=True):
|
||||
img_tag = a_tag.find('img', alt=True)
|
||||
if img_tag and "LEGO Building Instructions" in img_tag['alt']:
|
||||
found_tags.append(
|
||||
(
|
||||
img_tag['alt'].removeprefix('LEGO Building Instructions for '), # noqa: E501
|
||||
a_tag['href']
|
||||
)
|
||||
) # Save alt and href
|
||||
raw: list[tuple[str, str]] = []
|
||||
for a in soup.find_all('a', href=link_re):
|
||||
img = a.find('img', alt=True) # type: ignore
|
||||
if not img or set not in img['alt']: # type: ignore
|
||||
continue
|
||||
|
||||
# Raise an error if nothing found
|
||||
if not len(found_tags):
|
||||
raise ErrorException('No instruction found for set {set}'.format(
|
||||
set=set
|
||||
))
|
||||
# Turn the alt text into a slug
|
||||
alt_text = img['alt'].removeprefix('LEGO Building Instructions for ') # type: ignore
|
||||
slug = re.sub(r'[^A-Za-z0-9]+', '-', alt_text).strip('-')
|
||||
|
||||
return found_tags
|
||||
# Build the absolute download URL
|
||||
download_url = urljoin('https://rebrickable.com', a['href']) # type: ignore
|
||||
raw.append((slug, download_url))
|
||||
|
||||
if not raw:
|
||||
raise ErrorException(f'No download links found on instructions page for {set}')
|
||||
|
||||
# Disambiguate duplicate slugs by appending _1, _2, …
|
||||
from collections import Counter, defaultdict
|
||||
counts = Counter(name for name, _ in raw)
|
||||
seen: dict[str, int] = defaultdict(int)
|
||||
unique: list[tuple[str, str]] = []
|
||||
for name, url in raw:
|
||||
idx = seen[name]
|
||||
if counts[name] > 1 and idx > 0:
|
||||
final_name = f"{name}_{idx}"
|
||||
else:
|
||||
final_name = name
|
||||
seen[name] += 1
|
||||
unique.append((final_name, url))
|
||||
|
||||
return unique
|
||||
|
||||
@@ -0,0 +1,342 @@
|
||||
import logging
|
||||
from sqlite3 import Row
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from .exceptions import DatabaseException, ErrorException, NotFoundException
|
||||
from .record import BrickRecord
|
||||
from .sql import BrickSQL
|
||||
if TYPE_CHECKING:
|
||||
from .individual_minifigure import IndividualMinifigure
|
||||
from .set import BrickSet
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lego set metadata (customizable list of entries that can be checked)
class BrickMetadata(BrickRecord):
    # Metadata family name (set by each concrete subclass); used to build
    # column names, table names, and log/error messages
    kind: str

    # Endpoints (optional, not all metadata types use all of these):
    # Flask endpoint names passed to url_for() by the url_for_* helpers
    set_state_endpoint: str = ''
    individual_minifigure_state_endpoint: str = ''
    individual_minifigure_value_endpoint: str = ''

    # Queries: template identifiers handed to BrickSQL. The set/individual-
    # minifigure update queries default to '' because not every metadata
    # kind supports both targets
    delete_query: str
    insert_query: str
    select_query: str
    update_field_query: str
    update_set_state_query: str = ''
    update_set_value_query: str = ''
    update_individual_minifigure_state_query: str = ''
    update_individual_minifigure_value_query: str = ''
|
||||
|
||||
def __init__(self, /, *, record: Row | dict[str, Any] | None = None):
    """Create a metadata entry, optionally hydrating it from a record."""
    super().__init__()

    # Start without an identifier; insert() generates one when persisting
    self.fields.id = None

    # Hydrate the fields when a database row or dict was supplied
    if record is not None:
        self.ingest(record)
|
||||
|
||||
# SQL column name
def as_column(self, /) -> str:
    """Derive the SQL column name for this entry: '<kind>_<id>'."""
    normalized_kind = self.kind.lower().replace(' ', '-')
    return f'{normalized_kind}_{self.fields.id}'
|
||||
|
||||
# HTML dataset name
def as_dataset(self, /) -> str:
    """Dash-separated variant of the column name for HTML data attributes."""
    column = self.as_column()
    return column.replace('_', '-')
|
||||
|
||||
# Delete from database
def delete(self, /) -> None:
    """Remove this metadata entry from the database."""
    sql = BrickSQL()
    sql.executescript(self.delete_query, id=self.fields.id)
|
||||
|
||||
# Grab data from a form
def from_form(self, form: dict[str, str], /) -> Self:
    """Populate this entry's name from a submitted form.

    Raises ErrorException when the 'name' field is absent or empty.
    """
    name = form.get('name', None)

    if name is None or name == '':
        # Use the concrete kind in the message: this base class backs
        # every metadata family, not just statuses (the previous message
        # hardcoded "Status name cannot be empty")
        raise ErrorException('{kind} name cannot be empty'.format(
            kind=self.kind.capitalize(),
        ))

    self.fields.name = name

    return self
|
||||
|
||||
# Insert into database
def insert(self, /, **context) -> None:
    """Insert this entry, generating a column-name-safe unique ID.

    Extra keyword arguments are forwarded to the insert query template.
    """
    self.safe()

    # UUID with underscores instead of dashes so the ID can double as
    # part of an SQL column name
    self.fields.id = str(uuid4()).replace('-', '_')

    sql = BrickSQL()
    sql.executescript(
        self.insert_query,
        id=self.fields.id,
        name=self.fields.safe_name,
        **context
    )
|
||||
|
||||
# Rename the entry
def rename(self, /) -> None:
    """Persist the current in-memory name to the database."""
    new_name = self.fields.name
    self.update_field('name', value=new_name)
|
||||
|
||||
# Make the name "safe"
# Security: eh.
def safe(self, /) -> None:
    """Escape single quotes so the name can be embedded in an SQL script.

    NOTE(review): this is quote-doubling, not parameter binding — it is
    used for executescript templating (see insert()); binding would be
    safer where the query path allows it.
    """
    # Prevent self-ownage with accidental quote escape
    self.fields.safe_name = self.fields.name.replace("'", "''")
|
||||
|
||||
# URL to change the selected state of this metadata item for a set
def url_for_set_state(self, id: str, /) -> str:
    """URL toggling this metadata entry for the set with the given ID."""
    endpoint = self.set_state_endpoint
    return url_for(endpoint, id=id, metadata_id=self.fields.id)
|
||||
|
||||
# URL to change the selected state of this metadata item for an individual minifigure
def url_for_individual_minifigure_state(self, id: str, /) -> str:
    """URL toggling this entry for the individual minifigure with the given ID."""
    endpoint = self.individual_minifigure_state_endpoint
    return url_for(endpoint, id=id, metadata_id=self.fields.id)
|
||||
|
||||
# URL to change the value for an individual minifigure
def url_for_individual_minifigure_value(self, id: str, /) -> str:
    """URL updating this entry's value for the given individual minifigure."""
    endpoint = self.individual_minifigure_value_endpoint
    return url_for(endpoint, id=id)
|
||||
|
||||
# Select a specific metadata (with an id)
def select_specific(self, id: str, /) -> Self:
    """Load the metadata entry with the given ID.

    Raises NotFoundException when no matching row exists.
    """
    # Save the parameters to the fields
    self.fields.id = id

    # Load from database; bail out early on success
    if self.select():
        return self

    raise NotFoundException(
        '{kind} with ID {id} was not found in the database'.format(
            kind=self.kind.capitalize(),
            id=self.fields.id,
        ),
    )
|
||||
|
||||
# Update a field
def update_field(
    self,
    field: str,
    /,
    *,
    json: Any | None = None,
    value: Any | None = None
) -> Any:
    """Update one column of this entry and commit immediately.

    The value comes from the explicit `value` argument or, failing that,
    from json['value']. Raises ErrorException on empty values,
    NotFoundException for unknown (or 'id') fields, and DatabaseException
    unless exactly one row was modified. Returns the stored value.
    """
    # Explicit value wins; otherwise fall back to the JSON body
    if value is None and json is not None:
        value = json.get('value', None)

    if value is None:
        raise ErrorException('"{field}" of a {kind} cannot be set to an empty value'.format(  # noqa: E501
            field=field,
            kind=self.kind
        ))

    # Whitelist check: only existing, non-ID fields may be targeted.
    # This also keeps the field name safe to interpolate into the query
    # template below (it cannot be bound as a parameter).
    if field == 'id' or not hasattr(self.fields, field):
        raise NotFoundException('"{field}" is not a field of a {kind}'.format(  # noqa: E501
            kind=self.kind,
            field=field
        ))

    parameters = self.sql_parameters()
    parameters['value'] = value

    # Update the status
    rows, _ = BrickSQL().execute_and_commit(
        self.update_field_query,
        parameters=parameters,
        field=field,
    )

    # Anything other than a single modified row means the update missed
    if rows != 1:
        raise DatabaseException('Could not update the field "{field}" for {kind} "{name}" ({id})'.format(  # noqa: E501
            field=field,
            kind=self.kind,
            name=self.fields.name,
            id=self.fields.id,
        ))

    # Info
    logger.info('{kind} "{name}" ({id}): field "{field}" changed to "{value}"'.format(  # noqa: E501
        kind=self.kind.capitalize(),
        name=self.fields.name,
        id=self.fields.id,
        field=field,
        value=value,
    ))

    return value
|
||||
|
||||
# Update the selected state of this metadata item for a set
def update_set_state(
    self,
    brickset: 'BrickSet',
    /,
    *,
    json: Any | None = None,
    state: Any | None = None
) -> Any:
    """Set or clear this metadata entry for one set and commit.

    The state comes from the explicit `state` argument or, failing that,
    from json['value'] (defaulting to False). Raises DatabaseException
    unless exactly one row was updated. Returns the stored state.
    """
    if state is None and json is not None:
        state = json.get('value', False)

    # The target column is passed by name to the query template
    parameters = self.sql_parameters()
    parameters['set_id'] = brickset.fields.id
    parameters['state'] = state

    rows, _ = BrickSQL().execute_and_commit(
        self.update_set_state_query,
        parameters=parameters,
        name=self.as_column(),
    )

    if rows != 1:
        raise DatabaseException('Could not update the {kind} "{name}" state for set {set} ({id})'.format(  # noqa: E501
            kind=self.kind,
            name=self.fields.name,
            set=brickset.fields.set,
            id=brickset.fields.id,
        ))

    # Info
    logger.info('{kind} "{name}" state changed to "{state}" for set {set} ({id})'.format(  # noqa: E501
        kind=self.kind,
        name=self.fields.name,
        state=state,
        set=brickset.fields.set,
        id=brickset.fields.id,
    ))

    return state
|
||||
|
||||
# Check if this metadata has a specific individual minifigure
def has_individual_minifigure(
    self,
    individual_minifigure: 'IndividualMinifigure',
    /,
) -> bool:
    """Check if this owner/tag/status is assigned to a individual minifigure.

    Returns True when the relationship column is set for the minifigure's
    row, False otherwise (including when no row exists at all).
    """
    # Determine the table name based on metadata type
    table_name = f'bricktracker_individual_minifigure_{self.kind}s'
    column_name = f'{self.kind}_{self.fields.id}'

    # Identifiers cannot be bound as parameters, so they are interpolated;
    # both come from internal values (kind is a class constant, the ID is
    # generated by insert()), never from user input. The minifigure ID
    # value itself IS bound.
    sql = BrickSQL()
    query = f'SELECT COUNT(*) as count FROM "{table_name}" WHERE "id" = ? AND "{column_name}" = 1'
    result = sql.cursor.execute(query, (individual_minifigure.fields.id,)).fetchone()

    # Bug fix: `result and result['count'] > 0` could return None or the
    # row object instead of the annotated bool
    return result is not None and result['count'] > 0
|
||||
|
||||
# Update the selected state of this metadata item for a individual minifigure
def update_individual_minifigure_state(
    self,
    individual_minifigure: 'IndividualMinifigure',
    /,
    *,
    json: Any | None = None,
    state: Any | None = None
) -> Any:
    """Set or clear this metadata entry for one individual minifigure.

    The state comes from the explicit `state` argument or, failing that,
    from json['value'] (defaulting to False). Commits immediately and
    raises DatabaseException unless exactly one row was updated.
    Returns the stored state.
    """
    if state is None and json is not None:
        state = json.get('value', False)

    # The target column is passed by name to the query template
    parameters = self.sql_parameters()
    parameters['id'] = individual_minifigure.fields.id
    parameters['state'] = state

    rows, _ = BrickSQL().execute_and_commit(
        self.update_individual_minifigure_state_query,
        parameters=parameters,
        name=self.as_column(),
    )

    if rows != 1:
        raise DatabaseException('Could not update the {kind} "{name}" state for individual minifigure {figure} ({id})'.format(
            kind=self.kind,
            name=self.fields.name,
            figure=individual_minifigure.fields.figure,
            id=individual_minifigure.fields.id,
        ))

    # Info
    logger.info('{kind} "{name}" state changed to "{state}" for individual minifigure {figure} ({id})'.format(
        kind=self.kind,
        name=self.fields.name,
        state=state,
        figure=individual_minifigure.fields.figure,
        id=individual_minifigure.fields.id,
    ))

    return state
|
||||
|
||||
# Update the selected value of this metadata item for a set
def update_set_value(
    self,
    brickset: 'BrickSet',
    /,
    *,
    json: Any | None = None,
    value: Any | None = None,
) -> Any:
    """Store a value of this metadata kind for one set and commit.

    The value comes from the explicit `value` argument or, failing that,
    from json['value']; an empty string is normalized to None (clearing
    the value). Raises DatabaseException unless exactly one row was
    updated. Returns the stored value.
    """
    if value is None and json is not None:
        value = json.get('value', '')

    # Empty string means "clear the value"
    if value == '':
        value = None

    parameters = self.sql_parameters()
    parameters['set_id'] = brickset.fields.id
    parameters['value'] = value

    rows, _ = BrickSQL().execute_and_commit(
        self.update_set_value_query,
        parameters=parameters,
    )

    # Give the log message below a printable name when clearing a value
    # on an entry that was never loaded from the database
    # NOTE(review): when value is not None and the entry is unloaded,
    # self.fields.name may still be missing here — confirm callers always
    # load the entry first in that path
    if value is None and not hasattr(self.fields, 'name'):
        self.fields.name = 'None'

    if rows != 1:
        raise DatabaseException('Could not update the {kind} value for set {set} ({id})'.format(  # noqa: E501
            kind=self.kind,
            set=brickset.fields.set,
            id=brickset.fields.id,
        ))

    # Info
    logger.info('{kind} value changed to "{name}" ({value}) for set {set} ({id})'.format(  # noqa: E501
        kind=self.kind,
        name=self.fields.name,
        value=value,
        set=brickset.fields.set,
        id=brickset.fields.id,
    ))

    return value
|
||||
@@ -0,0 +1,198 @@
|
||||
import logging
|
||||
from typing import List, overload, Self, Type, TypeVar
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from .exceptions import ErrorException, NotFoundException
|
||||
from .fields import BrickRecordFields
|
||||
from .record_list import BrickRecordList
|
||||
from .set_owner import BrickSetOwner
|
||||
from .set_purchase_location import BrickSetPurchaseLocation
|
||||
from .set_status import BrickSetStatus
|
||||
from .set_storage import BrickSetStorage
|
||||
from .set_tag import BrickSetTag
|
||||
from .wish_owner import BrickWishOwner
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
T = TypeVar(
|
||||
'T',
|
||||
BrickSetOwner,
|
||||
BrickSetPurchaseLocation,
|
||||
BrickSetStatus,
|
||||
BrickSetStorage,
|
||||
BrickSetTag,
|
||||
BrickWishOwner
|
||||
)
|
||||
|
||||
|
||||
# Lego sets metadata list
class BrickMetadataList(BrickRecordList[T]):
    """List of set metadata records (owners, statuses, storages, tags, ...).

    Records are cached as class variables: instantiating the list loads
    the metadata from the database only once, unless forced or overridden
    with an explicit list of records.
    """
    kind: str
    mapping: dict[str, T]
    model: Type[T]

    # Database
    table: str
    order: str

    # Queries
    select_query: str

    # List-specific endpoints (for operations on the list itself)
    set_state_endpoint: str = ''
    set_value_endpoint: str = ''
    individual_minifigure_value_endpoint: str = ''

    def __init__(
        self,
        model: Type[T],
        /,
        *,
        force: bool = False,
        records: list[T] | None = None
    ):
        self.model = model

        # Records override (masking the class variables with instance ones)
        if records is not None:
            self.override()

            for metadata in records:
                self.records.append(metadata)
                self.mapping[metadata.fields.id] = metadata
        else:
            # Load metadata only if there is none already loaded
            records = getattr(self, 'records', None)

            if records is None or force:
                # Don't use super().__init__ as it would mask class variables
                self.fields = BrickRecordFields()

                logger.info('Loading {kind} list'.format(
                    kind=self.kind
                ))

                self.__class__.records = []
                self.__class__.mapping = {}

                # Load the metadata from the database
                for record in self.select(order=self.order):
                    metadata = model(record=record)

                    self.__class__.records.append(metadata)
                    self.__class__.mapping[metadata.fields.id] = metadata

    # HTML prefix name
    def as_prefix(self, /) -> str:
        return self.kind.replace(' ', '-')

    # Filter the list of records (this one does nothing)
    def filter(self) -> list[T]:
        return self.records

    # Add a layer of override data (instance attributes masking the
    # class-level caches)
    def override(self) -> None:
        self.fields = BrickRecordFields()

        self.records = []
        self.mapping = {}

    # Return the items as columns for a select
    @classmethod
    def as_columns(cls, /, table: str | None = None, **kwargs) -> str:
        new = cls.new()

        # Use provided table name or default to class table
        table_name = table if table is not None else cls.table

        return ', '.join([
            '"{table}"."{column}"'.format(
                table=table_name,
                column=record.as_column(),
            )
            for record
            in new.filter(**kwargs)
        ])

    # Return the items as a dictionary mapping column names to UUIDs
    @classmethod
    def as_column_mapping(cls, /, **kwargs) -> dict:
        new = cls.new()

        return {
            record.as_column(): record.fields.id
            for record in new.filter(**kwargs)
        }

    # Grab a specific record by ID; allow_none returns a blank model
    # for an empty or missing ID instead of raising
    @classmethod
    def get(cls, id: str | None, /, *, allow_none: bool = False) -> T:
        new = cls.new()

        if allow_none and (id == '' or id is None):
            return new.model()

        if id is None:
            raise ErrorException('Cannot get {kind} with no ID'.format(
                kind=new.kind.capitalize()
            ))

        if id not in new.mapping:
            raise NotFoundException(
                '{kind} with ID {id} was not found in the database'.format(
                    kind=new.kind.capitalize(),
                    id=id,
                ),
            )

        return new.mapping[id]

    # Get the list of records depending on the context
    @overload
    @classmethod
    def list(cls, /, **kwargs) -> List[T]: ...

    @overload
    @classmethod
    def list(cls, /, as_class: bool = False, **kwargs) -> Self: ...

    @classmethod
    def list(cls, /, as_class: bool = False, **kwargs) -> List[T] | Self:
        new = cls.new()

        # Named 'filtered' to avoid shadowing the list() builtin
        filtered = new.filter(**kwargs)

        if as_class:
            # Return a copy of the metadata list with overriden records
            return cls(new.model, records=filtered)
        else:
            return filtered

    # Instantiate the list with the proper class
    @classmethod
    def new(cls, /, *, force: bool = False) -> Self:
        # NotImplementedError (a subclass of Exception, so existing
        # handlers still match) is the idiomatic signal for a method
        # that concrete subclasses must override
        raise NotImplementedError(
            'new() is not implemented for BrickMetadataList'
        )

    # URL to change the selected state of this metadata item for a set
    @classmethod
    def url_for_set_state(cls, id: str, /) -> str:
        return url_for(
            cls.set_state_endpoint,
            id=id,
        )

    # URL to change the selected value of this metadata item for a set
    @classmethod
    def url_for_set_value(cls, id: str, /) -> str:
        return url_for(
            cls.set_value_endpoint,
            id=id,
        )

    # URL to change the selected value of this metadata item for an
    # individual minifigure
    @classmethod
    def url_for_individual_minifigure_value(cls, id: str, /) -> str:
        return url_for(
            cls.individual_minifigure_value_endpoint,
            id=id,
        )
|
||||
@@ -0,0 +1,29 @@
|
||||
from typing import Any, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..sql import BrickSQL
|
||||
|
||||
|
||||
# Grab the list of checkboxes to create a list of SQL columns
def migration_0007(sql: 'BrickSQL', /) -> dict[str, Any]:
    """Build the SQL fragments needed to migrate checkbox-based statuses.

    Returns a dict with 'sources', 'targets' and 'structure' comma-joined
    column fragments, one entry per checkbox row found in the database.
    """
    # Don't rely on sql files as they could be removed in the future
    sql.cursor.execute('SELECT "bricktracker_set_checkboxes"."id" FROM "bricktracker_set_checkboxes"')  # noqa: E501
    records = sql.cursor.fetchall()

    # Collect the ids once instead of iterating the records three times
    ids = [record['id'] for record in records]

    return {
        'sources': ', '.join(
            '"bricktracker_set_statuses_old"."status_{id}"'.format(id=id)
            for id in ids
        ),
        'targets': ', '.join(
            '"status_{id}"'.format(id=id)
            for id in ids
        ),
        'structure': ', '.join(
            '"status_{id}" BOOLEAN NOT NULL DEFAULT 0'.format(id=id)
            for id in ids
        ),
    }
|
||||
+60
-110
@@ -1,48 +1,68 @@
|
||||
from sqlite3 import Row
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
|
||||
from flask import current_app, url_for
|
||||
import logging
|
||||
import traceback
|
||||
from typing import Self, TYPE_CHECKING
|
||||
|
||||
from .exceptions import ErrorException, NotFoundException
|
||||
from .part_list import BrickPartList
|
||||
from .rebrickable_image import RebrickableImage
|
||||
from .record import BrickRecord
|
||||
from .rebrickable_minifigure import RebrickableMinifigure
|
||||
if TYPE_CHECKING:
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lego minifigure
|
||||
class BrickMinifigure(BrickRecord):
|
||||
brickset: 'BrickSet | None'
|
||||
|
||||
class BrickMinifigure(RebrickableMinifigure):
|
||||
# Queries
|
||||
insert_query: str = 'minifigure/insert'
|
||||
generic_query: str = 'minifigure/select/generic'
|
||||
select_query: str = 'minifigure/select/specific'
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
brickset: 'BrickSet | None' = None,
|
||||
record: Row | dict[str, Any] | None = None,
|
||||
):
|
||||
super().__init__()
|
||||
# Import a minifigure into the database
|
||||
def download(self, socket: 'BrickSocket', refresh: bool = False) -> bool:
|
||||
if self.brickset is None:
|
||||
raise ErrorException('Importing a minifigure from Rebrickable outside of a set is not supported') # noqa: E501
|
||||
|
||||
# Save the brickset
|
||||
self.brickset = brickset
|
||||
try:
|
||||
# Insert into the database
|
||||
socket.auto_progress(
|
||||
message='Set {set}: inserting minifigure {figure} into database'.format( # noqa: E501
|
||||
set=self.brickset.fields.set,
|
||||
figure=self.fields.figure
|
||||
)
|
||||
)
|
||||
|
||||
# Ingest the record if it has one
|
||||
if record is not None:
|
||||
self.ingest(record)
|
||||
if not refresh:
|
||||
# Insert into database
|
||||
self.insert(commit=False)
|
||||
|
||||
# Return the number just in digits format
|
||||
def clean_number(self, /) -> str:
|
||||
number: str = self.fields.fig_num
|
||||
number = number.removeprefix('fig-')
|
||||
number = number.lstrip('0')
|
||||
# Load the inventory
|
||||
if not BrickPartList.download(
|
||||
socket,
|
||||
self.brickset,
|
||||
minifigure=self,
|
||||
refresh=refresh
|
||||
):
|
||||
return False
|
||||
|
||||
return number
|
||||
# Insert the rebrickable set into database (after counting parts)
|
||||
self.insert_rebrickable()
|
||||
|
||||
except Exception as e:
|
||||
socket.fail(
|
||||
message='Error while importing minifigure {figure} from {set}: {error}'.format( # noqa: E501
|
||||
figure=self.fields.figure,
|
||||
set=self.brickset.fields.set,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# Parts
|
||||
def generic_parts(self, /) -> BrickPartList:
|
||||
@@ -51,108 +71,38 @@ class BrickMinifigure(BrickRecord):
|
||||
# Parts
|
||||
def parts(self, /) -> BrickPartList:
|
||||
if self.brickset is None:
|
||||
raise ErrorException('Part list for minifigure {number} requires a brickset'.format( # noqa: E501
|
||||
number=self.fields.fig_num,
|
||||
raise ErrorException('Part list for minifigure {figure} requires a brickset'.format( # noqa: E501
|
||||
figure=self.fields.figure,
|
||||
))
|
||||
|
||||
return BrickPartList().load(self.brickset, minifigure=self)
|
||||
return BrickPartList().list_specific(self.brickset, minifigure=self)
|
||||
|
||||
# Select a generic minifigure
|
||||
def select_generic(self, fig_num: str, /) -> Self:
|
||||
def select_generic(self, figure: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.fig_num = fig_num
|
||||
self.fields.figure = figure
|
||||
|
||||
if not self.select(override_query=self.generic_query):
|
||||
raise NotFoundException(
|
||||
'Minifigure with number {number} was not found in the database'.format( # noqa: E501
|
||||
number=self.fields.fig_num,
|
||||
'Minifigure with figure {figure} was not found in the database'.format( # noqa: E501
|
||||
figure=self.fields.figure,
|
||||
),
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
# Select a specific minifigure (with a set and an number)
|
||||
def select_specific(self, brickset: 'BrickSet', fig_num: str, /) -> Self:
|
||||
# Select a specific minifigure (with a set and a figure)
|
||||
def select_specific(self, brickset: 'BrickSet', figure: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.brickset = brickset
|
||||
self.fields.fig_num = fig_num
|
||||
self.fields.figure = figure
|
||||
|
||||
if not self.select():
|
||||
raise NotFoundException(
|
||||
'Minifigure with number {number} from set {set} was not found in the database'.format( # noqa: E501
|
||||
number=self.fields.fig_num,
|
||||
'Minifigure with figure {figure} from set {set} was not found in the database'.format( # noqa: E501
|
||||
figure=self.fields.figure,
|
||||
set=self.brickset.fields.set,
|
||||
),
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
# Return a dict with common SQL parameters for a minifigure
|
||||
def sql_parameters(self, /) -> dict[str, Any]:
|
||||
parameters = super().sql_parameters()
|
||||
|
||||
# Supplement from the brickset
|
||||
if self.brickset is not None:
|
||||
if 'u_id' not in parameters:
|
||||
parameters['u_id'] = self.brickset.fields.id
|
||||
|
||||
if 'set_num' not in parameters:
|
||||
parameters['set_num'] = self.brickset.fields.set
|
||||
|
||||
return parameters
|
||||
|
||||
# Self url
|
||||
def url(self, /) -> str:
|
||||
return url_for(
|
||||
'minifigure.details',
|
||||
number=self.fields.fig_num,
|
||||
)
|
||||
|
||||
# Compute the url for minifigure part image
|
||||
def url_for_image(self, /) -> str:
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
if self.fields.set_img_url is None:
|
||||
file = RebrickableImage.nil_minifigure_name()
|
||||
else:
|
||||
file = self.fields.fig_num
|
||||
|
||||
return RebrickableImage.static_url(file, 'MINIFIGURES_FOLDER')
|
||||
else:
|
||||
if self.fields.set_img_url is None:
|
||||
return current_app.config['REBRICKABLE_IMAGE_NIL_MINIFIGURE']
|
||||
else:
|
||||
return self.fields.set_img_url
|
||||
|
||||
# Compute the url for the rebrickable page
|
||||
def url_for_rebrickable(self, /) -> str:
|
||||
if current_app.config['REBRICKABLE_LINKS']:
|
||||
try:
|
||||
return current_app.config['REBRICKABLE_LINK_MINIFIGURE_PATTERN'].format( # noqa: E501
|
||||
number=self.fields.fig_num.lower(),
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return ''
|
||||
|
||||
# Normalize from Rebrickable
|
||||
@staticmethod
|
||||
def from_rebrickable(
|
||||
data: dict[str, Any],
|
||||
/,
|
||||
*,
|
||||
brickset: 'BrickSet | None' = None,
|
||||
**_,
|
||||
) -> dict[str, Any]:
|
||||
record = {
|
||||
'fig_num': data['set_num'],
|
||||
'name': data['set_name'],
|
||||
'quantity': data['quantity'],
|
||||
'set_img_url': data['set_img_url'],
|
||||
}
|
||||
|
||||
if brickset is not None:
|
||||
record['set_num'] = brickset.fields.set
|
||||
record['u_id'] = brickset.fields.id
|
||||
|
||||
return record
|
||||
|
||||
+178
-62
@@ -1,11 +1,17 @@
|
||||
import logging
|
||||
import traceback
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .minifigure import BrickMinifigure
|
||||
from .rebrickable import Rebrickable
|
||||
from .record_list import BrickRecordList
|
||||
if TYPE_CHECKING:
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lego minifigures
|
||||
@@ -15,10 +21,12 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
|
||||
|
||||
# Queries
|
||||
all_query: str = 'minifigure/list/all'
|
||||
all_by_owner_query: str = 'minifigure/list/all_by_owner'
|
||||
damaged_part_query: str = 'minifigure/list/damaged_part'
|
||||
last_query: str = 'minifigure/list/last'
|
||||
missing_part_query: str = 'minifigure/list/missing_part'
|
||||
select_query: str = 'minifigure/list/from_set'
|
||||
using_part_query: str = 'minifigure/list/using_part'
|
||||
missing_part_query: str = 'minifigure/list/missing_part'
|
||||
|
||||
def __init__(self, /):
|
||||
super().__init__()
|
||||
@@ -31,13 +39,71 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
|
||||
|
||||
# Load all minifigures
|
||||
def all(self, /) -> Self:
|
||||
for record in self.select(
|
||||
override_query=self.all_query,
|
||||
order=self.order
|
||||
):
|
||||
minifigure = BrickMinifigure(record=record)
|
||||
self.list(override_query=self.all_query)
|
||||
|
||||
self.records.append(minifigure)
|
||||
return self
|
||||
|
||||
# Load all minifigures by owner
|
||||
def all_by_owner(self, owner_id: str | None = None, /) -> Self:
|
||||
# Save the owner_id parameter
|
||||
self.fields.owner_id = owner_id
|
||||
|
||||
# Load the minifigures from the database
|
||||
self.list(override_query=self.all_by_owner_query)
|
||||
|
||||
return self
|
||||
|
||||
# Load minifigures with pagination support
|
||||
def all_filtered_paginated(
|
||||
self,
|
||||
owner_id: str | None = None,
|
||||
search_query: str | None = None,
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
sort_field: str | None = None,
|
||||
sort_order: str = 'asc'
|
||||
) -> tuple[Self, int]:
|
||||
# Prepare filter context
|
||||
filter_context = {}
|
||||
if owner_id and owner_id != 'all':
|
||||
filter_context['owner_id'] = owner_id
|
||||
list_query = self.all_by_owner_query
|
||||
else:
|
||||
list_query = self.all_query
|
||||
|
||||
if search_query:
|
||||
filter_context['search_query'] = search_query
|
||||
|
||||
# Field mapping for sorting
|
||||
field_mapping = {
|
||||
'name': '"combined"."name"',
|
||||
'parts': '"combined"."number_of_parts"',
|
||||
'quantity': '"total_quantity"',
|
||||
'missing': '"total_missing"',
|
||||
'damaged': '"total_damaged"',
|
||||
'sets': '"total_sets"',
|
||||
'individual': '"total_individual"'
|
||||
}
|
||||
|
||||
# Use the base pagination method
|
||||
return self.paginate(
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
sort_field=sort_field,
|
||||
sort_order=sort_order,
|
||||
list_query=list_query,
|
||||
field_mapping=field_mapping,
|
||||
**filter_context
|
||||
)
|
||||
|
||||
# Minifigures with a part damaged part
|
||||
def damaged_part(self, part: str, color: int, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
# Load the minifigures from the database
|
||||
self.list(override_query=self.damaged_part_query)
|
||||
|
||||
return self
|
||||
|
||||
@@ -47,29 +113,78 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
|
||||
if current_app.config['RANDOM']:
|
||||
order = 'RANDOM()'
|
||||
else:
|
||||
order = 'minifigures.rowid DESC'
|
||||
order = '"combined"."rowid" DESC'
|
||||
|
||||
for record in self.select(
|
||||
override_query=self.last_query,
|
||||
order=order,
|
||||
limit=limit
|
||||
):
|
||||
minifigure = BrickMinifigure(record=record)
|
||||
|
||||
self.records.append(minifigure)
|
||||
self.list(override_query=self.last_query, order=order, limit=limit)
|
||||
|
||||
return self
|
||||
|
||||
# Base minifigure list
|
||||
def list(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
override_query: str | None = None,
|
||||
order: str | None = None,
|
||||
limit: int | None = None,
|
||||
**context: Any,
|
||||
) -> None:
|
||||
if order is None:
|
||||
order = self.order
|
||||
|
||||
if hasattr(self, 'brickset'):
|
||||
brickset = self.brickset
|
||||
else:
|
||||
brickset = None
|
||||
|
||||
# Prepare template context for owner filtering
|
||||
context_vars = {}
|
||||
if hasattr(self.fields, 'owner_id') and self.fields.owner_id is not None:
|
||||
context_vars['owner_id'] = self.fields.owner_id
|
||||
|
||||
# Merge with any additional context passed in
|
||||
context_vars.update(context)
|
||||
|
||||
# Load the sets from the database
|
||||
for record in super().select(
|
||||
override_query=override_query,
|
||||
order=order,
|
||||
limit=limit,
|
||||
**context_vars
|
||||
):
|
||||
minifigure = BrickMinifigure(brickset=brickset, record=record)
|
||||
|
||||
self.records.append(minifigure)
|
||||
|
||||
# Load minifigures from a brickset
|
||||
def load(self, brickset: 'BrickSet', /) -> Self:
|
||||
def from_set(self, brickset: 'BrickSet', /) -> Self:
|
||||
# Save the brickset
|
||||
self.brickset = brickset
|
||||
|
||||
# Load the minifigures from the database
|
||||
for record in self.select(order=self.order):
|
||||
minifigure = BrickMinifigure(brickset=self.brickset, record=record)
|
||||
self.list()
|
||||
|
||||
self.records.append(minifigure)
|
||||
return self
|
||||
|
||||
# Minifigures missing a part
|
||||
def missing_part(self, part: str, color: int, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
# Load the minifigures from the database
|
||||
self.list(override_query=self.missing_part_query)
|
||||
|
||||
return self
|
||||
|
||||
# Minifigure using a part
|
||||
def using_part(self, part: str, color: int, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
# Load the minifigures from the database
|
||||
self.list(override_query=self.using_part_query)
|
||||
|
||||
return self
|
||||
|
||||
@@ -78,57 +193,58 @@ class BrickMinifigureList(BrickRecordList[BrickMinifigure]):
|
||||
parameters: dict[str, Any] = super().sql_parameters()
|
||||
|
||||
if self.brickset is not None:
|
||||
parameters['u_id'] = self.brickset.fields.id
|
||||
parameters['set_num'] = self.brickset.fields.set
|
||||
parameters['id'] = self.brickset.fields.id
|
||||
|
||||
# Add owner_id parameter for owner filtering
|
||||
if hasattr(self.fields, 'owner_id') and self.fields.owner_id is not None:
|
||||
parameters['owner_id'] = self.fields.owner_id
|
||||
|
||||
return parameters
|
||||
|
||||
# Minifigures missing a part
|
||||
def missing_part(
|
||||
self,
|
||||
part_num: str,
|
||||
color_id: int,
|
||||
# Import the minifigures from Rebrickable
|
||||
@staticmethod
|
||||
def download(
|
||||
socket: 'BrickSocket',
|
||||
brickset: 'BrickSet',
|
||||
/,
|
||||
*,
|
||||
element_id: int | None = None,
|
||||
) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part_num = part_num
|
||||
self.fields.color_id = color_id
|
||||
self.fields.element_id = element_id
|
||||
refresh: bool = False
|
||||
) -> bool:
|
||||
try:
|
||||
socket.auto_progress(
|
||||
message='Set {set}: loading minifigures from Rebrickable'.format( # noqa: E501
|
||||
set=brickset.fields.set,
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
# Load the minifigures from the database
|
||||
for record in self.select(
|
||||
override_query=self.missing_part_query,
|
||||
order=self.order
|
||||
):
|
||||
minifigure = BrickMinifigure(record=record)
|
||||
logger.debug('rebrick.lego.get_set_minifigs("{set}")'.format(
|
||||
set=brickset.fields.set,
|
||||
))
|
||||
|
||||
self.records.append(minifigure)
|
||||
minifigures = Rebrickable[BrickMinifigure](
|
||||
'get_set_minifigs',
|
||||
brickset.fields.set,
|
||||
BrickMinifigure,
|
||||
socket=socket,
|
||||
brickset=brickset,
|
||||
).list()
|
||||
|
||||
return self
|
||||
# Process each minifigure
|
||||
for minifigure in minifigures:
|
||||
if not minifigure.download(socket, refresh=refresh):
|
||||
return False
|
||||
|
||||
# Minifigure using a part
|
||||
def using_part(
|
||||
self,
|
||||
part_num: str,
|
||||
color_id: int,
|
||||
/,
|
||||
*,
|
||||
element_id: int | None = None,
|
||||
) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part_num = part_num
|
||||
self.fields.color_id = color_id
|
||||
self.fields.element_id = element_id
|
||||
return True
|
||||
|
||||
# Load the minifigures from the database
|
||||
for record in self.select(
|
||||
override_query=self.using_part_query,
|
||||
order=self.order
|
||||
):
|
||||
minifigure = BrickMinifigure(record=record)
|
||||
except Exception as e:
|
||||
socket.fail(
|
||||
message='Error while importing set {set} minifigure list: {error}'.format( # noqa: E501
|
||||
set=brickset.fields.set,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
|
||||
self.records.append(minifigure)
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
return self
|
||||
return False
|
||||
|
||||
@@ -11,9 +11,11 @@ NAVBAR: Final[list[dict[str, Any]]] = [
|
||||
{'e': 'set.list', 't': 'Sets', 'i': 'grid-line', 'f': 'HIDE_ALL_SETS'}, # noqa: E501
|
||||
{'e': 'add.add', 't': 'Add', 'i': 'add-circle-line', 'f': 'HIDE_ADD_SET'}, # noqa: E501
|
||||
{'e': 'part.list', 't': 'Parts', 'i': 'shapes-line', 'f': 'HIDE_ALL_PARTS'}, # noqa: E501
|
||||
{'e': 'part.missing', 't': 'Missing', 'i': 'error-warning-line', 'f': 'HIDE_MISSING_PARTS'}, # noqa: E501
|
||||
{'e': 'part.problem', 't': 'Problems', 'i': 'error-warning-line', 'f': 'HIDE_ALL_PROBLEMS_PARTS'}, # noqa: E501
|
||||
{'e': 'minifigure.list', 't': 'Minifigures', 'i': 'group-line', 'f': 'HIDE_ALL_MINIFIGURES'}, # noqa: E501
|
||||
{'e': 'instructions.list', 't': 'Instructions', 'i': 'file-line', 'f': 'HIDE_ALL_INSTRUCTIONS'}, # noqa: E501
|
||||
{'e': 'storage.list', 't': 'Storages', 'i': 'archive-2-line', 'f': 'HIDE_ALL_STORAGES'}, # noqa: E501
|
||||
{'e': 'statistics.overview', 't': 'Statistics', 'i': 'bar-chart-line', 'f': 'HIDE_STATISTICS'}, # noqa: E501
|
||||
{'e': 'wish.list', 't': 'Wishlist', 'i': 'gift-line', 'f': 'HIDE_WISHES'},
|
||||
{'e': 'admin.admin', 't': 'Admin', 'i': 'settings-4-line', 'f': 'HIDE_ADMIN'}, # noqa: E501
|
||||
]
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
from flask import current_app, request
|
||||
from typing import Any, Dict, Tuple
|
||||
|
||||
|
||||
def get_pagination_config(entity_type: str) -> Tuple[int, bool]:
    """Get pagination configuration for an entity type (sets, parts, minifigures).

    Returns (per_page, is_mobile); per_page is 0 when server-side
    pagination is disabled for this entity type.
    """
    entity = entity_type.upper()

    # Pagination has to be switched on per entity type
    if not current_app.config.get(f'{entity}_SERVER_SIDE_PAGINATION', False):
        return 0, False

    # Crude mobile detection based on the User-Agent header
    agent = request.headers.get('User-Agent', '').lower()
    mobile_markers = ['mobile', 'android', 'iphone', 'ipad']
    is_mobile = any(marker in agent for marker in mobile_markers)

    # Pick the page size matching the detected device class
    if is_mobile:
        size_key = f'{entity}_PAGINATION_SIZE_MOBILE'
    else:
        size_key = f'{entity}_PAGINATION_SIZE_DESKTOP'

    return current_app.config[size_key], is_mobile
|
||||
|
||||
|
||||
def build_pagination_context(page: int, per_page: int, total_count: int, is_mobile: bool) -> Dict[str, Any]:
    """Build pagination context for templates."""
    if total_count > 0:
        # Ceiling division without importing math
        total_pages = (total_count + per_page - 1) // per_page
    else:
        # An empty result set still renders as a single (empty) page
        total_pages = 1

    return {
        'page': page,
        'per_page': per_page,
        'total_count': total_count,
        'total_pages': total_pages,
        'has_prev': page > 1,
        'has_next': page < total_pages,
        'is_mobile': is_mobile,
    }
|
||||
|
||||
|
||||
def get_request_params() -> Tuple[str, str, str, int]:
    """Extract common request parameters for pagination.

    Returns (search_query, sort_field, sort_order, page). The page number
    comes from an untrusted query string: a non-numeric or non-positive
    value falls back to 1 instead of raising ValueError.
    """
    search_query = request.args.get('search', '').strip()
    sort_field = request.args.get('sort', '')
    sort_order = request.args.get('order', 'asc')

    # Guard against non-numeric page values in the query string
    try:
        page = int(request.args.get('page', 1))
    except (TypeError, ValueError):
        page = 1

    # Page numbers are 1-based
    if page < 1:
        page = 1

    return search_query, sort_field, sort_order, page
|
||||
@@ -35,3 +35,28 @@ def parse_set(set: str, /) -> str:
|
||||
))
|
||||
|
||||
return '{number}-{version}'.format(number=number, version=version)
|
||||
|
||||
|
||||
# Make sense of string supposed to contain a minifigure ID
def parse_minifig(figure: str, /) -> str:
    """Normalize a minifigure identifier to the 'fig-XXXXXX' form.

    Accepts either a full 'fig-...' identifier or a bare digit string,
    which is zero-padded to six characters and prefixed.
    """
    figure = figure.strip()

    # Minifigure format is typically fig-XXXXXX;
    # a bare number gets the prefix added for it
    if not figure.startswith('fig-'):
        if figure.isdigit():
            figure = 'fig-{figure}'.format(figure=figure.zfill(6))
        else:
            raise ErrorException('Minifigure "{figure}" must start with "fig-"'.format(
                figure=figure,
            ))

    # Validate format: fig-XXXXXX where X can be digits or letters
    # (the prefix is guaranteed at this point, so only reject extra dashes)
    prefix, _, suffix = figure.partition('-')
    if prefix != 'fig' or '-' in suffix:
        raise ErrorException('Invalid minifigure format "{figure}". Expected format: fig-XXXXXX'.format(
            figure=figure,
        ))

    return figure
|
||||
|
||||
+258
-193
@@ -1,23 +1,25 @@
|
||||
import os
|
||||
import logging
|
||||
from sqlite3 import Row
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
from urllib.parse import urlparse
|
||||
import traceback
|
||||
|
||||
from flask import current_app, url_for
|
||||
from flask import url_for
|
||||
|
||||
from .exceptions import DatabaseException, ErrorException, NotFoundException
|
||||
from .rebrickable_image import RebrickableImage
|
||||
from .record import BrickRecord
|
||||
from .exceptions import ErrorException, NotFoundException
|
||||
from .rebrickable_part import RebrickablePart
|
||||
from .sql import BrickSQL
|
||||
if TYPE_CHECKING:
|
||||
from .minifigure import BrickMinifigure
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lego set or minifig part
|
||||
class BrickPart(BrickRecord):
|
||||
brickset: 'BrickSet | None'
|
||||
minifigure: 'BrickMinifigure | None'
|
||||
class BrickPart(RebrickablePart):
|
||||
identifier: str
|
||||
kind: str
|
||||
|
||||
# Queries
|
||||
insert_query: str = 'part/insert'
|
||||
@@ -30,74 +32,91 @@ class BrickPart(BrickRecord):
|
||||
*,
|
||||
brickset: 'BrickSet | None' = None,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
record: Row | dict[str, Any] | None = None,
|
||||
record: Row | dict[str, Any] | None = None
|
||||
):
|
||||
super().__init__()
|
||||
|
||||
# Save the brickset and minifigure
|
||||
self.brickset = brickset
|
||||
self.minifigure = minifigure
|
||||
|
||||
# Ingest the record if it has one
|
||||
if record is not None:
|
||||
self.ingest(record)
|
||||
|
||||
# Delete missing part
|
||||
def delete_missing(self, /) -> None:
|
||||
BrickSQL().execute_and_commit(
|
||||
'missing/delete/from_set',
|
||||
parameters=self.sql_parameters()
|
||||
super().__init__(
|
||||
brickset=brickset,
|
||||
minifigure=minifigure,
|
||||
record=record
|
||||
)
|
||||
|
||||
# Set missing part
|
||||
def set_missing(self, quantity: int, /) -> None:
|
||||
parameters = self.sql_parameters()
|
||||
parameters['quantity'] = quantity
|
||||
if self.minifigure is not None:
|
||||
self.identifier = self.minifigure.fields.figure
|
||||
self.kind = 'Minifigure'
|
||||
elif self.brickset is not None:
|
||||
self.identifier = self.brickset.fields.set
|
||||
self.kind = 'Set'
|
||||
|
||||
# Can't use UPSERT because the database has no keys
|
||||
# Try to update
|
||||
database = BrickSQL()
|
||||
rows, _ = database.execute(
|
||||
'missing/update/from_set',
|
||||
parameters=parameters,
|
||||
)
|
||||
# Import a part into the database
|
||||
def download(self, socket: 'BrickSocket', refresh: bool = False) -> bool:
|
||||
if self.brickset is None:
|
||||
raise ErrorException('Importing a part from Rebrickable outside of a set is not supported') # noqa: E501
|
||||
|
||||
# Insert if no row has been affected
|
||||
if not rows:
|
||||
rows, _ = database.execute(
|
||||
'missing/insert',
|
||||
parameters=parameters,
|
||||
try:
|
||||
# Insert into the database
|
||||
socket.auto_progress(
|
||||
message='{kind} {identifier}: inserting part {part} into database'.format( # noqa: E501
|
||||
kind=self.kind,
|
||||
identifier=self.identifier,
|
||||
part=self.fields.part
|
||||
)
|
||||
)
|
||||
|
||||
if rows != 1:
|
||||
raise DatabaseException(
|
||||
'Could not update the missing quantity for part {id}'.format( # noqa: E501
|
||||
id=self.fields.id
|
||||
)
|
||||
)
|
||||
if not refresh:
|
||||
# Insert into database
|
||||
self.insert(commit=False)
|
||||
|
||||
database.commit()
|
||||
# Insert the rebrickable set into database
|
||||
self.insert_rebrickable()
|
||||
|
||||
except Exception as e:
|
||||
socket.fail(
|
||||
message='Error while importing part {part} from {kind} {identifier}: {error}'.format( # noqa: E501
|
||||
part=self.fields.part,
|
||||
kind=self.kind,
|
||||
identifier=self.identifier,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# A identifier for HTML component
|
||||
def html_id(self, prefix: str | None = None, /) -> str:
|
||||
components: list[str] = ['part']
|
||||
|
||||
if prefix is not None:
|
||||
components.append(prefix)
|
||||
|
||||
if self.fields.figure is not None:
|
||||
components.append(self.fields.figure)
|
||||
|
||||
components.append(self.fields.part)
|
||||
components.append(str(self.fields.color))
|
||||
components.append(str(self.fields.spare))
|
||||
|
||||
return '-'.join(components)
|
||||
|
||||
# Select a generic part
|
||||
def select_generic(
|
||||
self,
|
||||
part_num: str,
|
||||
color_id: int,
|
||||
part: str,
|
||||
color: int,
|
||||
/,
|
||||
*,
|
||||
element_id: int | None = None
|
||||
) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part_num = part_num
|
||||
self.fields.color_id = color_id
|
||||
self.fields.element_id = element_id
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
if not self.select(override_query=self.generic_query):
|
||||
raise NotFoundException(
|
||||
'Part with number {number}, color ID {color} and element ID {element} was not found in the database'.format( # noqa: E501
|
||||
number=self.fields.part_num,
|
||||
color=self.fields.color_id,
|
||||
element=self.fields.element_id,
|
||||
'Part with number {number}, color ID {color} was not found in the database'.format( # noqa: E501
|
||||
number=self.fields.part,
|
||||
color=self.fields.color,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -107,7 +126,9 @@ class BrickPart(BrickRecord):
|
||||
def select_specific(
|
||||
self,
|
||||
brickset: 'BrickSet',
|
||||
id: str,
|
||||
part: str,
|
||||
color: int,
|
||||
spare: int,
|
||||
/,
|
||||
*,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
@@ -115,168 +136,212 @@ class BrickPart(BrickRecord):
|
||||
# Save the parameters to the fields
|
||||
self.brickset = brickset
|
||||
self.minifigure = minifigure
|
||||
self.fields.id = id
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
self.fields.spare = spare
|
||||
|
||||
if not self.select():
|
||||
if self.minifigure is not None:
|
||||
figure = self.minifigure.fields.figure
|
||||
else:
|
||||
figure = None
|
||||
|
||||
raise NotFoundException(
|
||||
'Part with ID {id} from set {set} was not found in the database'.format( # noqa: E501
|
||||
'Part {part} with color {color} (spare: {spare}) from set {set} ({id}) (minifigure: {figure}) was not found in the database'.format( # noqa: E501
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
spare=self.fields.spare,
|
||||
id=self.fields.id,
|
||||
set=self.brickset.fields.set,
|
||||
figure=figure,
|
||||
),
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
# Return a dict with common SQL parameters for a part
|
||||
def sql_parameters(self, /) -> dict[str, Any]:
|
||||
parameters = super().sql_parameters()
|
||||
|
||||
# Supplement from the brickset
|
||||
if 'u_id' not in parameters and self.brickset is not None:
|
||||
parameters['u_id'] = self.brickset.fields.id
|
||||
|
||||
if 'set_num' not in parameters:
|
||||
if self.minifigure is not None:
|
||||
parameters['set_num'] = self.minifigure.fields.fig_num
|
||||
|
||||
elif self.brickset is not None:
|
||||
parameters['set_num'] = self.brickset.fields.set
|
||||
|
||||
return parameters
|
||||
|
||||
# Update the missing part
|
||||
def update_missing(self, missing: Any, /) -> None:
|
||||
# If empty, delete it
|
||||
if missing == '':
|
||||
self.delete_missing()
|
||||
|
||||
# Update checked state for part walkthrough
|
||||
def update_checked(self, json: Any | None, /) -> bool:
|
||||
# Handle both direct 'checked' key and changer.js 'value' key format
|
||||
if json:
|
||||
checked = json.get('checked', json.get('value', False))
|
||||
else:
|
||||
# Try to understand it as a number
|
||||
try:
|
||||
missing = int(missing)
|
||||
except Exception:
|
||||
raise ErrorException('"{missing}" is not a valid integer'.format( # noqa: E501
|
||||
missing=missing
|
||||
))
|
||||
checked = False
|
||||
|
||||
# If 0, delete it
|
||||
if missing == 0:
|
||||
self.delete_missing()
|
||||
checked = bool(checked)
|
||||
|
||||
else:
|
||||
# If negative, it's an error
|
||||
if missing < 0:
|
||||
raise ErrorException('Cannot set a negative missing value')
|
||||
# Update the field
|
||||
self.fields.checked = checked
|
||||
|
||||
# Otherwise upsert it
|
||||
# Not checking if it is too much, you do you
|
||||
self.set_missing(missing)
|
||||
|
||||
# Self url
|
||||
def url(self, /) -> str:
|
||||
return url_for(
|
||||
'part.details',
|
||||
number=self.fields.part_num,
|
||||
color=self.fields.color_id,
|
||||
element=self.fields.element_id,
|
||||
BrickSQL().execute_and_commit(
|
||||
'part/update/checked',
|
||||
parameters=self.sql_parameters()
|
||||
)
|
||||
|
||||
# Compute the url for the bricklink page
|
||||
def url_for_bricklink(self, /) -> str:
|
||||
if current_app.config['BRICKLINK_LINKS']:
|
||||
try:
|
||||
return current_app.config['BRICKLINK_LINK_PART_PATTERN'].format( # noqa: E501
|
||||
number=self.fields.part_num,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
return checked
|
||||
|
||||
return ''
|
||||
|
||||
# Compute the url for the part image
|
||||
def url_for_image(self, /) -> str:
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
if self.fields.part_img_url is None:
|
||||
file = RebrickableImage.nil_name()
|
||||
else:
|
||||
file = self.fields.part_img_url_id
|
||||
|
||||
return RebrickableImage.static_url(file, 'PARTS_FOLDER')
|
||||
else:
|
||||
if self.fields.part_img_url is None:
|
||||
return current_app.config['REBRICKABLE_IMAGE_NIL']
|
||||
else:
|
||||
return self.fields.part_img_url
|
||||
|
||||
# Compute the url for missing part
|
||||
def url_for_missing(self, /) -> str:
|
||||
# Different URL for a minifigure part
|
||||
if self.minifigure is not None:
|
||||
# Compute the url for updating checked state
|
||||
def url_for_checked(self, /) -> str:
|
||||
# Check if this is an individual minifigure (has minifigure with id field, no brickset)
|
||||
if self.minifigure is not None and hasattr(self.minifigure.fields, 'id') and self.brickset is None:
|
||||
# Individual minifigure part
|
||||
return url_for(
|
||||
'set.missing_minifigure_part',
|
||||
id=self.fields.u_id,
|
||||
minifigure_id=self.minifigure.fields.fig_num,
|
||||
part_id=self.fields.id,
|
||||
'individual_minifigure.checked_part',
|
||||
id=self.minifigure.fields.id,
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
spare=self.fields.spare,
|
||||
)
|
||||
|
||||
# Set-based part (with or without minifigure)
|
||||
if self.minifigure is not None:
|
||||
figure = self.minifigure.fields.figure
|
||||
else:
|
||||
figure = None
|
||||
|
||||
return url_for(
|
||||
'set.missing_part',
|
||||
id=self.fields.u_id,
|
||||
part_id=self.fields.id
|
||||
'set.checked_part',
|
||||
id=self.fields.id,
|
||||
figure=figure,
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
spare=self.fields.spare,
|
||||
)
|
||||
|
||||
# Compute the url for the rebrickable page
|
||||
def url_for_rebrickable(self, /) -> str:
|
||||
if current_app.config['REBRICKABLE_LINKS']:
|
||||
try:
|
||||
return current_app.config['REBRICKABLE_LINK_PART_PATTERN'].format( # noqa: E501
|
||||
number=self.fields.part_num,
|
||||
color=self.fields.color_id,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
# Update a problematic part
|
||||
def update_problem(self, problem: str, json: Any | None, /) -> int:
|
||||
amount: str | int = json.get('value', '') # type: ignore
|
||||
|
||||
return ''
|
||||
# We need a positive integer
|
||||
try:
|
||||
if amount == '':
|
||||
amount = 0
|
||||
|
||||
# Normalize from Rebrickable
|
||||
@staticmethod
|
||||
def from_rebrickable(
|
||||
data: dict[str, Any],
|
||||
/,
|
||||
*,
|
||||
brickset: 'BrickSet | None' = None,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
**_,
|
||||
) -> dict[str, Any]:
|
||||
record = {
|
||||
'set_num': data['set_num'],
|
||||
'id': data['id'],
|
||||
'part_num': data['part']['part_num'],
|
||||
'name': data['part']['name'],
|
||||
'part_img_url': data['part']['part_img_url'],
|
||||
'part_img_url_id': None,
|
||||
'color_id': data['color']['id'],
|
||||
'color_name': data['color']['name'],
|
||||
'quantity': data['quantity'],
|
||||
'is_spare': data['is_spare'],
|
||||
'element_id': data['element_id'],
|
||||
}
|
||||
amount = int(amount)
|
||||
|
||||
if brickset is not None:
|
||||
record['u_id'] = brickset.fields.id
|
||||
if amount < 0:
|
||||
amount = 0
|
||||
except Exception:
|
||||
raise ErrorException('"{amount}" is not a valid integer'.format(
|
||||
amount=amount
|
||||
))
|
||||
|
||||
if minifigure is not None:
|
||||
record['set_num'] = data['fig_num']
|
||||
if amount < 0:
|
||||
raise ErrorException('Cannot set a negative amount')
|
||||
|
||||
# Extract the file name
|
||||
if data['part']['part_img_url'] is not None:
|
||||
part_img_url_file = os.path.basename(
|
||||
urlparse(data['part']['part_img_url']).path
|
||||
setattr(self.fields, problem, amount)
|
||||
|
||||
BrickSQL().execute_and_commit(
|
||||
'part/update/{problem}'.format(problem=problem),
|
||||
parameters=self.sql_parameters()
|
||||
)
|
||||
|
||||
return amount
|
||||
|
||||
# Compute the url for problematic part
|
||||
def url_for_problem(self, problem: str, /) -> str:
|
||||
# Check if this is an individual minifigure (has minifigure with id field, no brickset)
|
||||
if self.minifigure is not None and hasattr(self.minifigure.fields, 'id') and self.brickset is None:
|
||||
# Individual minifigure part
|
||||
return url_for(
|
||||
'individual_minifigure.problem_part',
|
||||
id=self.minifigure.fields.id,
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
spare=self.fields.spare,
|
||||
problem=problem,
|
||||
)
|
||||
|
||||
part_img_url_id, _ = os.path.splitext(part_img_url_file)
|
||||
# Set-based part (with or without minifigure)
|
||||
if self.minifigure is not None:
|
||||
figure = self.minifigure.fields.figure
|
||||
else:
|
||||
figure = None
|
||||
|
||||
if part_img_url_id is not None or part_img_url_id != '':
|
||||
record['part_img_url_id'] = part_img_url_id
|
||||
return url_for(
|
||||
'set.problem_part',
|
||||
id=self.fields.id,
|
||||
figure=figure,
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
spare=self.fields.spare,
|
||||
problem=problem,
|
||||
)
|
||||
|
||||
return record
|
||||
# Select a specific part from an individual minifigure
|
||||
def select_specific_individual_minifigure(
|
||||
self,
|
||||
minifigure: 'BrickMinifigure',
|
||||
part: str,
|
||||
color: int,
|
||||
spare: int,
|
||||
/,
|
||||
) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.minifigure = minifigure
|
||||
self.fields.id = minifigure.fields.id
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
self.fields.spare = spare
|
||||
|
||||
if not self.select(override_query='individual_minifigure/part/select/specific'):
|
||||
raise NotFoundException(
|
||||
'Part {part} with color {color} (spare: {spare}) from individual minifigure {figure} ({id}) was not found in the database'.format(
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
spare=self.fields.spare,
|
||||
figure=self.minifigure.fields.figure,
|
||||
id=self.minifigure.fields.id,
|
||||
),
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
# Update a problematic part for individual minifigure
|
||||
def update_problem_individual_minifigure(self, problem: str, json: Any | None, /) -> int:
|
||||
amount: str | int = json.get('value', '') # type: ignore
|
||||
|
||||
# We need a positive integer
|
||||
try:
|
||||
if amount == '':
|
||||
amount = 0
|
||||
|
||||
amount = int(amount)
|
||||
|
||||
if amount < 0:
|
||||
amount = 0
|
||||
except Exception:
|
||||
raise ErrorException('"{amount}" is not a valid integer'.format(
|
||||
amount=amount
|
||||
))
|
||||
|
||||
if amount < 0:
|
||||
raise ErrorException('Cannot set a negative amount')
|
||||
|
||||
setattr(self.fields, problem, amount)
|
||||
|
||||
BrickSQL().execute_and_commit(
|
||||
'individual_minifigure/part/update/{problem}'.format(problem=problem),
|
||||
parameters=self.sql_parameters()
|
||||
)
|
||||
|
||||
return amount
|
||||
|
||||
# Update checked state for individual minifigure part
|
||||
def update_checked_individual_minifigure(self, json: Any | None, /) -> bool:
|
||||
# Handle both direct 'checked' key and changer.js 'value' key format
|
||||
if json:
|
||||
checked = json.get('checked', json.get('value', False))
|
||||
else:
|
||||
checked = False
|
||||
|
||||
checked = bool(checked)
|
||||
|
||||
# Update the field
|
||||
self.fields.checked = checked
|
||||
|
||||
BrickSQL().execute_and_commit(
|
||||
'individual_minifigure/part/update/checked',
|
||||
parameters=self.sql_parameters()
|
||||
)
|
||||
|
||||
return checked
|
||||
|
||||
+344
-50
@@ -1,12 +1,18 @@
|
||||
import logging
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
import traceback
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .part import BrickPart
|
||||
from .rebrickable import Rebrickable
|
||||
from .record_list import BrickRecordList
|
||||
if TYPE_CHECKING:
|
||||
from .minifigure import BrickMinifigure
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lego set or minifig parts
|
||||
@@ -17,10 +23,14 @@ class BrickPartList(BrickRecordList[BrickPart]):
|
||||
|
||||
# Queries
|
||||
all_query: str = 'part/list/all'
|
||||
all_by_owner_query: str = 'part/list/all_by_owner'
|
||||
different_color_query = 'part/list/with_different_color'
|
||||
individual_minifigure_query: str = 'individual_minifigure/part/list/from_instance'
|
||||
last_query: str = 'part/list/last'
|
||||
minifigure_query: str = 'part/list/from_minifigure'
|
||||
missing_query: str = 'part/list/missing'
|
||||
select_query: str = 'part/list/from_set'
|
||||
problem_query: str = 'part/list/problem'
|
||||
print_query: str = 'part/list/from_print'
|
||||
select_query: str = 'part/list/specific'
|
||||
|
||||
def __init__(self, /):
|
||||
super().__init__()
|
||||
@@ -34,18 +44,146 @@ class BrickPartList(BrickRecordList[BrickPart]):
|
||||
|
||||
# Load all parts
|
||||
def all(self, /) -> Self:
|
||||
for record in self.select(
|
||||
override_query=self.all_query,
|
||||
order=self.order
|
||||
):
|
||||
part = BrickPart(record=record)
|
||||
|
||||
self.records.append(part)
|
||||
self.list(override_query=self.all_query)
|
||||
|
||||
return self
|
||||
|
||||
# Load parts from a brickset or minifigure
|
||||
def load(
|
||||
# Load all parts by owner
|
||||
def all_by_owner(self, owner_id: str | None = None, /) -> Self:
|
||||
# Save the owner_id parameter
|
||||
self.fields.owner_id = owner_id
|
||||
|
||||
# Load the parts from the database
|
||||
self.list(override_query=self.all_by_owner_query)
|
||||
|
||||
return self
|
||||
|
||||
# Load all parts with filters (owner and/or color)
|
||||
def all_filtered(self, owner_id: str | None = None, color_id: str | None = None, /) -> Self:
|
||||
# Save the filter parameters
|
||||
if owner_id is not None:
|
||||
self.fields.owner_id = owner_id
|
||||
if color_id is not None:
|
||||
self.fields.color_id = color_id
|
||||
|
||||
# Choose query based on whether owner filtering is needed
|
||||
if owner_id and owner_id != 'all':
|
||||
query = self.all_by_owner_query
|
||||
else:
|
||||
query = self.all_query
|
||||
|
||||
# Prepare context for query
|
||||
context = {}
|
||||
if current_app.config.get('SKIP_SPARE_PARTS', False):
|
||||
context['skip_spare_parts'] = True
|
||||
|
||||
# Load the parts from the database
|
||||
self.list(override_query=query, **context)
|
||||
|
||||
return self
|
||||
|
||||
# Load parts with pagination support
|
||||
def all_filtered_paginated(
|
||||
self,
|
||||
owner_id: str | None = None,
|
||||
color_id: str | None = None,
|
||||
search_query: str | None = None,
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
sort_field: str | None = None,
|
||||
sort_order: str = 'asc'
|
||||
) -> tuple[Self, int]:
|
||||
# Prepare filter context
|
||||
filter_context = {}
|
||||
if owner_id and owner_id != 'all':
|
||||
filter_context['owner_id'] = owner_id
|
||||
list_query = self.all_by_owner_query
|
||||
else:
|
||||
list_query = self.all_query
|
||||
|
||||
if color_id and color_id != 'all':
|
||||
filter_context['color_id'] = color_id
|
||||
if search_query:
|
||||
filter_context['search_query'] = search_query
|
||||
if current_app.config.get('SKIP_SPARE_PARTS', False):
|
||||
filter_context['skip_spare_parts'] = True
|
||||
|
||||
# Field mapping for sorting
|
||||
field_mapping = {
|
||||
'name': '"rebrickable_parts"."name"',
|
||||
'color': '"rebrickable_parts"."color_name"',
|
||||
'quantity': '"total_quantity"',
|
||||
'missing': '"total_missing"',
|
||||
'damaged': '"total_damaged"',
|
||||
'sets': '"total_sets"',
|
||||
'minifigures': '"total_minifigures"'
|
||||
}
|
||||
|
||||
# Use the base pagination method
|
||||
return self.paginate(
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
sort_field=sort_field,
|
||||
sort_order=sort_order,
|
||||
list_query=list_query,
|
||||
field_mapping=field_mapping,
|
||||
**filter_context
|
||||
)
|
||||
|
||||
# Base part list
|
||||
def list(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
override_query: str | None = None,
|
||||
order: str | None = None,
|
||||
limit: int | None = None,
|
||||
offset: int | None = None,
|
||||
**context: Any,
|
||||
) -> None:
|
||||
if order is None:
|
||||
order = self.order
|
||||
|
||||
if hasattr(self, 'brickset'):
|
||||
brickset = self.brickset
|
||||
else:
|
||||
brickset = None
|
||||
|
||||
if hasattr(self, 'minifigure'):
|
||||
minifigure = self.minifigure
|
||||
else:
|
||||
minifigure = None
|
||||
|
||||
# Prepare template context for filtering
|
||||
context_vars = {}
|
||||
if hasattr(self.fields, 'owner_id') and self.fields.owner_id is not None:
|
||||
context_vars['owner_id'] = self.fields.owner_id
|
||||
if hasattr(self.fields, 'color_id') and self.fields.color_id is not None:
|
||||
context_vars['color_id'] = self.fields.color_id
|
||||
if hasattr(self.fields, 'search_query') and self.fields.search_query:
|
||||
context_vars['search_query'] = self.fields.search_query
|
||||
|
||||
# Merge with any additional context passed in
|
||||
context_vars.update(context)
|
||||
|
||||
# Load the sets from the database
|
||||
for record in super().select(
|
||||
override_query=override_query,
|
||||
order=order,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
**context_vars
|
||||
):
|
||||
part = BrickPart(
|
||||
brickset=brickset,
|
||||
minifigure=minifigure,
|
||||
record=record,
|
||||
)
|
||||
|
||||
self.records.append(part)
|
||||
|
||||
# List specific parts from a brickset or minifigure
|
||||
def list_specific(
|
||||
self,
|
||||
brickset: 'BrickSet',
|
||||
/,
|
||||
@@ -57,17 +195,7 @@ class BrickPartList(BrickRecordList[BrickPart]):
|
||||
self.minifigure = minifigure
|
||||
|
||||
# Load the parts from the database
|
||||
for record in self.select(order=self.order):
|
||||
part = BrickPart(
|
||||
brickset=self.brickset,
|
||||
minifigure=minifigure,
|
||||
record=record,
|
||||
)
|
||||
|
||||
if current_app.config['SKIP_SPARE_PARTS'] and part.fields.is_spare:
|
||||
continue
|
||||
|
||||
self.records.append(part)
|
||||
self.list()
|
||||
|
||||
return self
|
||||
|
||||
@@ -81,47 +209,213 @@ class BrickPartList(BrickRecordList[BrickPart]):
|
||||
self.minifigure = minifigure
|
||||
|
||||
# Load the parts from the database
|
||||
for record in self.select(
|
||||
override_query=self.minifigure_query,
|
||||
order=self.order
|
||||
):
|
||||
part = BrickPart(
|
||||
minifigure=minifigure,
|
||||
record=record,
|
||||
)
|
||||
|
||||
if current_app.config['SKIP_SPARE_PARTS'] and part.fields.is_spare:
|
||||
continue
|
||||
|
||||
self.records.append(part)
|
||||
self.list(override_query=self.minifigure_query)
|
||||
|
||||
return self
|
||||
|
||||
# Load missing parts
|
||||
def missing(self, /) -> Self:
|
||||
for record in self.select(
|
||||
override_query=self.missing_query,
|
||||
order=self.order
|
||||
):
|
||||
part = BrickPart(record=record)
|
||||
# Load parts from an individual minifigure instance
|
||||
def from_individual_minifigure(
|
||||
self,
|
||||
minifigure: 'BrickMinifigure',
|
||||
/,
|
||||
) -> Self:
|
||||
# Save the minifigure
|
||||
self.minifigure = minifigure
|
||||
|
||||
self.records.append(part)
|
||||
# Load the parts from the database using the instance-specific query
|
||||
self.list(override_query=self.individual_minifigure_query)
|
||||
|
||||
return self
|
||||
|
||||
# Load generic parts from a print
|
||||
def from_print(
|
||||
self,
|
||||
brickpart: BrickPart,
|
||||
/,
|
||||
) -> Self:
|
||||
# Save the part and print
|
||||
if brickpart.fields.print is not None:
|
||||
self.fields.print = brickpart.fields.print
|
||||
else:
|
||||
self.fields.print = brickpart.fields.part
|
||||
|
||||
self.fields.part = brickpart.fields.part
|
||||
self.fields.color = brickpart.fields.color
|
||||
|
||||
# Load the parts from the database
|
||||
self.list(override_query=self.print_query)
|
||||
|
||||
return self
|
||||
|
||||
# Load problematic parts
|
||||
def problem(self, /) -> Self:
|
||||
self.list(override_query=self.problem_query)
|
||||
|
||||
return self
|
||||
|
||||
def problem_filtered(self, owner_id: str | None = None, color_id: str | None = None, /) -> Self:
|
||||
# Save the filter parameters for client-side filtering
|
||||
if owner_id is not None:
|
||||
self.fields.owner_id = owner_id
|
||||
if color_id is not None:
|
||||
self.fields.color_id = color_id
|
||||
|
||||
# Prepare context for query
|
||||
context = {}
|
||||
if owner_id and owner_id != 'all':
|
||||
context['owner_id'] = owner_id
|
||||
if color_id and color_id != 'all':
|
||||
context['color_id'] = color_id
|
||||
if current_app.config.get('SKIP_SPARE_PARTS', False):
|
||||
context['skip_spare_parts'] = True
|
||||
|
||||
# Load the problematic parts from the database
|
||||
self.list(override_query=self.problem_query, **context)
|
||||
|
||||
return self
|
||||
|
||||
def problem_paginated(
|
||||
self,
|
||||
owner_id: str | None = None,
|
||||
color_id: str | None = None,
|
||||
search_query: str | None = None,
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
sort_field: str | None = None,
|
||||
sort_order: str = 'asc'
|
||||
) -> tuple[Self, int]:
|
||||
# Prepare filter context
|
||||
filter_context = {}
|
||||
if owner_id and owner_id != 'all':
|
||||
filter_context['owner_id'] = owner_id
|
||||
if color_id and color_id != 'all':
|
||||
filter_context['color_id'] = color_id
|
||||
if search_query:
|
||||
filter_context['search_query'] = search_query
|
||||
if current_app.config.get('SKIP_SPARE_PARTS', False):
|
||||
filter_context['skip_spare_parts'] = True
|
||||
|
||||
# Field mapping for sorting
|
||||
field_mapping = {
|
||||
'name': '"rebrickable_parts"."name"',
|
||||
'color': '"rebrickable_parts"."color_name"',
|
||||
'quantity': '"total_quantity"',
|
||||
'missing': '"total_missing"',
|
||||
'damaged': '"total_damaged"',
|
||||
'sets': '"total_sets"',
|
||||
'minifigures': '"total_minifigures"'
|
||||
}
|
||||
|
||||
# Use the base pagination method with problem query
|
||||
return self.paginate(
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
sort_field=sort_field,
|
||||
sort_order=sort_order,
|
||||
list_query=self.problem_query,
|
||||
field_mapping=field_mapping,
|
||||
**filter_context
|
||||
)
|
||||
|
||||
# Return a dict with common SQL parameters for a parts list
|
||||
def sql_parameters(self, /) -> dict[str, Any]:
|
||||
parameters: dict[str, Any] = {}
|
||||
parameters: dict[str, Any] = super().sql_parameters()
|
||||
|
||||
# Set id
|
||||
# Set id - prioritize brickset, then check minifigure
|
||||
if self.brickset is not None:
|
||||
parameters['u_id'] = self.brickset.fields.id
|
||||
parameters['id'] = self.brickset.fields.id
|
||||
elif self.minifigure is not None and hasattr(self.minifigure.fields, 'id'):
|
||||
parameters['id'] = self.minifigure.fields.id
|
||||
|
||||
# Use the minifigure number if present,
|
||||
# otherwise use the set number
|
||||
if self.minifigure is not None:
|
||||
parameters['set_num'] = self.minifigure.fields.fig_num
|
||||
elif self.brickset is not None:
|
||||
parameters['set_num'] = self.brickset.fields.set
|
||||
parameters['figure'] = self.minifigure.fields.figure
|
||||
else:
|
||||
parameters['figure'] = None
|
||||
|
||||
return parameters
|
||||
|
||||
# Load generic parts with same base but different color
|
||||
def with_different_color(
|
||||
self,
|
||||
brickpart: BrickPart,
|
||||
/,
|
||||
) -> Self:
|
||||
# Save the part
|
||||
self.fields.part = brickpart.fields.part
|
||||
self.fields.color = brickpart.fields.color
|
||||
|
||||
# Load the parts from the database
|
||||
self.list(override_query=self.different_color_query)
|
||||
|
||||
return self
|
||||
|
||||
# Import the parts from Rebrickable
|
||||
@staticmethod
|
||||
def download(
|
||||
socket: 'BrickSocket',
|
||||
brickset: 'BrickSet',
|
||||
/,
|
||||
*,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
refresh: bool = False
|
||||
) -> bool:
|
||||
if minifigure is not None:
|
||||
identifier = minifigure.fields.figure
|
||||
kind = 'Minifigure'
|
||||
method = 'get_minifig_elements'
|
||||
else:
|
||||
identifier = brickset.fields.set
|
||||
kind = 'Set'
|
||||
method = 'get_set_elements'
|
||||
|
||||
try:
|
||||
socket.auto_progress(
|
||||
message='{kind} {identifier}: loading parts inventory from Rebrickable'.format( # noqa: E501
|
||||
kind=kind,
|
||||
identifier=identifier,
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
logger.debug('rebrick.lego.{method}("{identifier}")'.format(
|
||||
method=method,
|
||||
identifier=identifier,
|
||||
))
|
||||
|
||||
inventory = Rebrickable[BrickPart](
|
||||
method,
|
||||
identifier,
|
||||
BrickPart,
|
||||
socket=socket,
|
||||
brickset=brickset,
|
||||
minifigure=minifigure,
|
||||
).list()
|
||||
|
||||
# Process each part
|
||||
number_of_parts: int = 0
|
||||
for part in inventory:
|
||||
# Count the number of parts for minifigures
|
||||
if minifigure is not None:
|
||||
number_of_parts += part.fields.quantity
|
||||
|
||||
if not part.download(socket, refresh=refresh):
|
||||
return False
|
||||
|
||||
if minifigure is not None:
|
||||
minifigure.fields.number_of_parts = number_of_parts
|
||||
|
||||
except Exception as e:
|
||||
socket.fail(
|
||||
message='Error while importing {kind} {identifier} parts list: {error}'.format( # noqa: E501
|
||||
kind=kind,
|
||||
identifier=identifier,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -0,0 +1,437 @@
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import time
|
||||
from typing import Any, NamedTuple, TYPE_CHECKING
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
import cloudscraper
|
||||
from flask import current_app, url_for
|
||||
import requests
|
||||
|
||||
from .exceptions import ErrorException
|
||||
if TYPE_CHECKING:
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_peeron_user_agent():
|
||||
"""Get the User-Agent string for Peeron requests from config"""
|
||||
return current_app.config.get('REBRICKABLE_USER_AGENT',
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36')
|
||||
|
||||
|
||||
def get_peeron_download_delay():
|
||||
"""Get the delay in milliseconds between Peeron page downloads from config"""
|
||||
return current_app.config.get('PEERON_DOWNLOAD_DELAY', 1000)
|
||||
|
||||
|
||||
def get_min_image_size():
|
||||
"""Get the minimum image size for valid Peeron instruction pages from config"""
|
||||
return current_app.config.get('PEERON_MIN_IMAGE_SIZE', 100)
|
||||
|
||||
|
||||
def get_peeron_instruction_url(set_number: str, version_number: str):
|
||||
"""Get the Peeron instruction page URL using the configured pattern"""
|
||||
pattern = current_app.config.get('PEERON_INSTRUCTION_PATTERN', 'http://peeron.com/scans/{set_number}-{version_number}')
|
||||
return pattern.format(set_number=set_number, version_number=version_number)
|
||||
|
||||
|
||||
def get_peeron_thumbnail_url(set_number: str, version_number: str):
|
||||
"""Get the Peeron thumbnail base URL using the configured pattern"""
|
||||
pattern = current_app.config.get('PEERON_THUMBNAIL_PATTERN', 'http://belay.peeron.com/thumbs/{set_number}-{version_number}/')
|
||||
return pattern.format(set_number=set_number, version_number=version_number)
|
||||
|
||||
|
||||
def get_peeron_scan_url(set_number: str, version_number: str):
|
||||
"""Get the Peeron scan base URL using the configured pattern"""
|
||||
pattern = current_app.config.get('PEERON_SCAN_PATTERN', 'http://belay.peeron.com/scans/{set_number}-{version_number}/')
|
||||
return pattern.format(set_number=set_number, version_number=version_number)
|
||||
|
||||
|
||||
def create_peeron_scraper():
|
||||
"""Create a cloudscraper instance configured for Peeron"""
|
||||
scraper = cloudscraper.create_scraper()
|
||||
scraper.headers.update({
|
||||
"User-Agent": get_peeron_user_agent()
|
||||
})
|
||||
return scraper
|
||||
|
||||
|
||||
def get_peeron_cache_dir():
|
||||
"""Get the base directory for Peeron caching"""
|
||||
static_dir = Path(current_app.static_folder)
|
||||
cache_dir = static_dir / 'images' / 'peeron_cache'
|
||||
cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
return cache_dir
|
||||
|
||||
|
||||
def get_set_cache_dir(set_number: str, version_number: str) -> tuple[Path, Path]:
|
||||
"""Get cache directories for a specific set"""
|
||||
base_cache_dir = get_peeron_cache_dir()
|
||||
set_cache_key = f"{set_number}-{version_number}"
|
||||
|
||||
full_cache_dir = base_cache_dir / 'full' / set_cache_key
|
||||
thumb_cache_dir = base_cache_dir / 'thumbs' / set_cache_key
|
||||
|
||||
full_cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
thumb_cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
return full_cache_dir, thumb_cache_dir
|
||||
|
||||
|
||||
def cache_full_image_and_generate_thumbnail(image_url: str, page_number: str, set_number: str, version_number: str, session=None) -> tuple[str | None, str | None]:
    """
    Download and cache a full-size instruction scan, then generate a thumbnail.

    Uses the full-size scan URLs from Peeron. The full image is stored in the
    set's "full" cache directory and a max-150px JPEG preview in its "thumbs"
    directory (both provided by get_set_cache_dir()).

    :param image_url: URL of the full-size scan on Peeron.
    :param page_number: Page identifier, used as the cache filename stem.
    :param set_number: LEGO set number.
    :param version_number: Set version number.
    :param session: Optional pre-established requests-compatible session; a
        new Peeron scraper session is created when omitted.
    :return: (cached_image_path, thumbnail_url), or (None, None) if caching fails.
    """
    try:
        full_cache_dir, thumb_cache_dir = get_set_cache_dir(set_number, version_number)

        # Full image and thumbnail share the same filename; they live in
        # separate directories.
        filename = f"{page_number}.jpg"
        full_cache_path = full_cache_dir / filename
        thumb_cache_path = thumb_cache_dir / filename

        # Compute the public thumbnail URL once: it is identical for the
        # cache-hit and freshly-generated return paths (previously this
        # construction was duplicated in both branches).
        set_cache_key = f"{set_number}-{version_number}"
        thumbnail_url = url_for('static', filename=f'images/peeron_cache/thumbs/{set_cache_key}/{filename}')

        # Return existing cached files if they exist
        if full_cache_path.exists() and thumb_cache_path.exists():
            return str(full_cache_path), thumbnail_url

        # Download the full-size image using provided session or create new one
        if session is None:
            session = create_peeron_scraper()
        response = session.get(image_url, timeout=30)

        if response.status_code == 200 and len(response.content) > 0:
            # Validate it's plausibly an image by checking minimum byte size
            min_size = get_min_image_size()
            if len(response.content) < min_size:
                logger.warning(f"Image too small, skipping cache: {image_url}")
                return None, None

            # Write full-size image to cache
            with open(full_cache_path, 'wb') as f:
                f.write(response.content)

            logger.debug(f"Cached full image: {image_url} -> {full_cache_path}")

            # Generate thumbnail from the cached full image
            try:
                from PIL import Image
                with Image.open(full_cache_path) as img:
                    # Create thumbnail (max 150px on longest side to match template)
                    img.thumbnail((150, 150), Image.Resampling.LANCZOS)
                    img.save(thumb_cache_path, 'JPEG', quality=85)

                logger.debug(f"Generated thumbnail: {full_cache_path} -> {thumb_cache_path}")

                return str(full_cache_path), thumbnail_url

            except Exception as thumb_error:
                logger.error(f"Failed to generate thumbnail for {page_number}: {thumb_error}")
                # Clean up the full image so we never keep a cached page
                # without a matching thumbnail
                if full_cache_path.exists():
                    full_cache_path.unlink()
                return None, None
        else:
            logger.warning(f"Failed to download full image: {image_url}")
            return None, None

    except Exception as e:
        logger.error(f"Error caching full image {image_url}: {e}")
        return None, None
|
||||
|
||||
|
||||
def clear_set_cache(set_number: str, version_number: str) -> int:
    """
    Clear all cached files for a specific set after PDF generation.

    Removes the cached full-size images and thumbnails for the set, then
    attempts to remove each (now empty) per-set directory.

    :param set_number: LEGO set number.
    :param version_number: Set version number.
    :return: The number of files deleted (0 on error).
    """
    try:
        full_cache_dir, thumb_cache_dir = get_set_cache_dir(set_number, version_number)

        def _purge(cache_dir, label: str) -> int:
            """Delete all *.jpg files in cache_dir, then try to rmdir it."""
            count = 0
            if cache_dir.exists():
                for cache_file in cache_dir.glob('*.jpg'):
                    try:
                        cache_file.unlink()
                        count += 1
                        logger.debug(f"Deleted cached {label}: {cache_file}")
                    except OSError as e:
                        logger.warning(f"Failed to delete cache file {cache_file}: {e}")

                # Remove directory if empty
                try:
                    cache_dir.rmdir()
                except OSError:
                    pass  # Directory not empty or other error
            return count

        # The full/thumbs cleanup loops were previously duplicated verbatim;
        # only the log label differs.
        deleted_count = _purge(full_cache_dir, 'full image')
        deleted_count += _purge(thumb_cache_dir, 'thumbnail')

        # NOTE(review): the previous version also tried to rmdir() the parent
        # directories when their name matched "{set}-{version}", but the
        # parents are the shared 'full'/'thumbs' directories (see the
        # cache-path layout used by the thumbnail URL and clear_old_cache),
        # so the check never matched; that dead code was removed.

        logger.info(f"Set cache cleanup completed for {set_number}-{version_number}: {deleted_count} files deleted")
        return deleted_count

    except Exception as e:
        logger.error(f"Error during set cache cleanup for {set_number}-{version_number}: {e}")
        return 0
|
||||
|
||||
|
||||
def clear_old_cache(max_age_days: int = 7) -> int:
    """
    Clear old cache files across all sets.

    Walks both the full-size and thumbnail cache trees, deleting any *.jpg
    older than max_age_days and pruning per-set directories that become empty.

    :param max_age_days: Age threshold in days; older files are removed.
    :return: The number of files deleted (0 on error).
    """
    try:
        base_cache_dir = get_peeron_cache_dir()
        if not base_cache_dir.exists():
            return 0

        removed = 0
        # Files last modified before this instant are considered stale
        cutoff = time.time() - max_age_days * 24 * 60 * 60

        # Clean both full and thumbs directories
        for cache_type in ('full', 'thumbs'):
            type_dir = base_cache_dir / cache_type
            if not type_dir.exists():
                continue

            for set_dir in type_dir.iterdir():
                if not set_dir.is_dir():
                    continue

                for cache_file in set_dir.glob('*.jpg'):
                    if os.path.getmtime(cache_file) < cutoff:
                        try:
                            cache_file.unlink()
                            removed += 1
                            logger.debug(f"Deleted old cache file: {cache_file}")
                        except OSError as e:
                            logger.warning(f"Failed to delete cache file {cache_file}: {e}")

                # Drop the per-set directory once it is empty
                try:
                    if not any(set_dir.iterdir()):
                        set_dir.rmdir()
                except OSError:
                    pass

        logger.info(f"Old cache cleanup completed: {removed} files deleted")
        return removed

    except Exception as e:
        logger.error(f"Error during old cache cleanup: {e}")
        return 0
|
||||
|
||||
|
||||
class PeeronPage(NamedTuple):
    """
    Represents a single instruction page from Peeron.

    Immutable value object tying together the remote source URL of a scanned
    page with its locally cached copies.
    """
    # Page identifier as it appears in the Peeron URL path
    page_number: str
    # Original Peeron full-size image URL
    original_image_url: str
    # Local full-size cached image path
    cached_full_image_path: str
    # Local thumbnail URL for preview
    cached_thumbnail_url: str
    # Alternative text for the page image (templates / accessibility)
    alt_text: str
    # Rotation in degrees applied when building the PDF (0, 90, 180, 270)
    rotation: int = 0
|
||||
|
||||
|
||||
# Peeron instruction scraper
class PeeronInstructions(object):
    """
    Scraper for LEGO building instructions hosted on Peeron.

    Mirrors the interface style of BrickInstructions: locate the available
    scanned pages for a set, cache them locally, and expose them as
    PeeronPage tuples.
    """

    socket: 'BrickSocket | None'
    set_number: str
    version_number: str
    pages: list[PeeronPage]

    def __init__(
        self,
        set_number: str,
        version_number: str = '1',
        /,
        *,
        socket: 'BrickSocket | None' = None,
    ):
        # Save the socket (optional; used only for progress reporting)
        self.socket = socket

        # Parse set number (handle both "4011" and "4011-1" formats).
        # split('-', 1) always yields exactly two parts when '-' is present,
        # so no length check is needed.
        if '-' in set_number:
            self.set_number, self.version_number = set_number.split('-', 1)
        else:
            self.set_number = set_number
            self.version_number = version_number

        # Placeholder for pages
        self.pages = []

    # Check if instructions exist on Peeron (lightweight)
    def exists(self, /) -> bool:
        """Check if the set exists on Peeron without caching thumbnails"""
        try:
            base_url = get_peeron_instruction_url(self.set_number, self.version_number)
            scraper = create_peeron_scraper()
            response = scraper.get(base_url)

            if response.status_code != 200:
                return False

            soup = BeautifulSoup(response.text, 'html.parser')

            # Check for "Browse instruction library" header (set not found)
            if soup.find('h1', string="Browse instruction library"):
                return False

            # Look for thumbnail images to confirm instructions exist.
            # Use the configured thumbnail URL prefix, consistent with
            # find_pages() (previously a hard-coded belay.peeron.com prefix).
            thumb_base_url = get_peeron_thumbnail_url(self.set_number, self.version_number)
            thumbnails = soup.select(f'table[cellspacing="5"] a img[src^="{thumb_base_url}"]')
            return len(thumbnails) > 0

        except Exception:
            return False

    # Find all available instruction pages on Peeron
    def find_pages(self, /) -> list[PeeronPage]:
        """
        Scrape Peeron's HTML and return a list of available instruction pages.
        Similar to BrickInstructions.find_instructions() but for Peeron.

        :raises ErrorException: when the page cannot be fetched, the set does
            not exist on Peeron, or no instruction pages are found.
        """
        base_url = get_peeron_instruction_url(self.set_number, self.version_number)
        thumb_base_url = get_peeron_thumbnail_url(self.set_number, self.version_number)
        scan_base_url = get_peeron_scan_url(self.set_number, self.version_number)

        logger.debug(f"[find_pages] fetching HTML from {base_url!r}")

        # Set up session with persistent cookies for Peeron (like working dl_peeron.py)
        scraper = create_peeron_scraper()

        # Download the main HTML page to establish session and cookies
        try:
            logger.debug(f"[find_pages] Establishing session by visiting: {base_url}")
            response = scraper.get(base_url)
            logger.debug(f"[find_pages] Main page visit: HTTP {response.status_code}")
            if response.status_code != 200:
                raise ErrorException(f'Failed to load Peeron page for {self.set_number}-{self.version_number}. HTTP {response.status_code}')
        except requests.exceptions.RequestException as e:
            raise ErrorException(f'Failed to connect to Peeron: {e}')

        # Parse HTML to locate instruction pages
        soup = BeautifulSoup(response.text, 'html.parser')

        # Check for "Browse instruction library" header (set not found)
        if soup.find('h1', string="Browse instruction library"):
            raise ErrorException(f'Set {self.set_number}-{self.version_number} not found on Peeron')

        # Locate all thumbnail images in the expected table structure.
        # thumb_base_url was already computed above from the configured
        # thumbnail pattern (a redundant recomputation was removed here).
        thumbnails = soup.select(f'table[cellspacing="5"] a img[src^="{thumb_base_url}"]')

        if not thumbnails:
            raise ErrorException(f'No instruction pages found for {self.set_number}-{self.version_number} on Peeron')

        pages: list[PeeronPage] = []
        total_thumbnails = len(thumbnails)

        # Initialize progress if socket is available
        if self.socket:
            self.socket.progress_total = total_thumbnails
            self.socket.progress_count = 0
            self.socket.progress(message=f"Starting to cache {total_thumbnails} full images")

        for idx, img in enumerate(thumbnails, 1):
            thumb_url = img['src']

            # Extract the page number from the thumbnail URL
            page_number = thumb_url.split('/')[-2]

            # Build the full-size scan URL using the page number
            full_size_url = f"{scan_base_url}{page_number}/"

            logger.debug(f"[find_pages] Page {page_number}: thumb={thumb_url}, full_size={full_size_url}")

            # Create alt text for the page
            alt_text = f"LEGO Instructions {self.set_number}-{self.version_number} Page {page_number}"

            # Report progress if socket is available
            if self.socket:
                self.socket.progress_count = idx
                self.socket.progress(message=f"Caching full image {idx} of {total_thumbnails}")

            # Cache the full-size image and generate thumbnail preview using established session
            cached_full_path, cached_thumb_url = cache_full_image_and_generate_thumbnail(
                full_size_url, page_number, self.set_number, self.version_number, session=scraper
            )

            # Skip this page if caching failed
            if not cached_full_path or not cached_thumb_url:
                logger.warning(f"[find_pages] Skipping page {page_number} due to caching failure")
                continue

            page = PeeronPage(
                page_number=page_number,
                original_image_url=full_size_url,
                cached_full_image_path=cached_full_path,
                cached_thumbnail_url=cached_thumb_url,
                alt_text=alt_text
            )
            pages.append(page)

        # Cache the pages for later use
        self.pages = pages

        logger.debug(f"[find_pages] found {len(pages)} pages for {self.set_number}-{self.version_number}")
        return pages

    # Find instructions with fallback to Peeron
    @staticmethod
    def find_instructions_with_peeron_fallback(set: str, /) -> tuple[list[tuple[str, str]], list[PeeronPage] | None]:
        """
        Enhanced version of BrickInstructions.find_instructions() that falls back to Peeron.

        Returns (rebrickable_instructions, peeron_pages).
        If rebrickable_instructions is empty, peeron_pages will contain Peeron data.

        :raises ErrorException: the original Rebrickable error, chained with
            the Peeron error, when both sources fail.
        """
        from .instructions import BrickInstructions

        # First try Rebrickable
        try:
            rebrickable_instructions = BrickInstructions.find_instructions(set)
            return rebrickable_instructions, None
        except ErrorException as e:
            logger.info(f"Rebrickable failed for {set}: {e}. Trying Peeron fallback...")

        # Fallback to Peeron
        try:
            peeron = PeeronInstructions(set)
            peeron_pages = peeron.find_pages()
            return [], peeron_pages
        except ErrorException as peeron_error:
            # Both failed, re-raise original Rebrickable error
            logger.info(f"Peeron also failed for {set}: {peeron_error}")
            raise e from peeron_error
|
||||
@@ -0,0 +1,200 @@
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
from typing import Any, TYPE_CHECKING
|
||||
|
||||
import cloudscraper
|
||||
from flask import current_app
|
||||
from PIL import Image
|
||||
|
||||
from .exceptions import DownloadException, ErrorException
|
||||
from .instructions import BrickInstructions
|
||||
from .peeron_instructions import PeeronPage, get_min_image_size, get_peeron_download_delay, get_peeron_instruction_url, create_peeron_scraper
|
||||
if TYPE_CHECKING:
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# PDF generator for Peeron instruction pages
class PeeronPDF(object):
    """
    Builds a single PDF from previously cached Peeron instruction pages.

    Progress and completion are reported through the provided BrickSocket,
    mirroring BrickInstructions.download(). On success the per-set image
    cache is cleared (best effort).
    """

    socket: 'BrickSocket'
    set_number: str
    version_number: str
    pages: list[PeeronPage]
    filename: str

    def __init__(
        self,
        set_number: str,
        version_number: str,
        pages: list[PeeronPage],
        /,
        *,
        socket: 'BrickSocket',
    ):
        # Save the socket
        self.socket = socket

        # Save set information
        self.set_number = set_number
        self.version_number = version_number
        self.pages = pages

        # Generate filename following BrickTracker conventions
        self.filename = f"{set_number}-{version_number}_peeron.pdf"

    # Download pages and create PDF
    def create_pdf(self, /) -> None:
        """
        Merge the cached Peeron pages into a PDF.

        Uses progress updates via socket similar to BrickInstructions.download().
        Missing cached pages are skipped with a warning; if no cached page is
        available at all, the operation fails. Errors are reported through
        socket.fail() rather than raised.
        """
        try:
            target_path = self._get_target_path()

            # Skip if we already have it
            if os.path.isfile(target_path):
                # Create BrickInstructions instance to get PDF URL
                instructions = BrickInstructions(self.filename)
                pdf_url = instructions.url()
                return self.socket.complete(
                    message=f'File {self.filename} already exists, skipped - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
                )

            # Set up progress tracking
            total_pages = len(self.pages)
            self.socket.update_total(total_pages)
            self.socket.progress_count = 0
            self.socket.progress(message=f"Starting PDF creation from {total_pages} cached pages")

            # Use cached images directly - no downloads needed!
            cached_files_with_rotation = []
            missing_pages = []

            for i, page in enumerate(self.pages):
                # Check if cached file exists
                if os.path.isfile(page.cached_full_image_path):
                    cached_files_with_rotation.append((page.cached_full_image_path, page.rotation))

                    # Update progress
                    self.socket.progress_count += 1
                    self.socket.progress(
                        message=f"Processing cached page {page.page_number} ({i + 1}/{total_pages})"
                    )
                else:
                    missing_pages.append(page.page_number)
                    logger.warning(f"Cached image missing for page {page.page_number}: {page.cached_full_image_path}")

            if not cached_files_with_rotation:
                raise DownloadException(f"No cached images available for set {self.set_number}-{self.version_number}. Cache may have been cleared.")

            elif len(cached_files_with_rotation) < total_pages:
                # Partial success: proceed with what we have, but log it
                error_msg = f"Only found {len(cached_files_with_rotation)}/{total_pages} cached images."
                if missing_pages:
                    error_msg += f" Missing pages: {', '.join(missing_pages)}."
                logger.warning(error_msg)

            # Create PDF from cached images with rotation
            self._create_pdf_from_images(cached_files_with_rotation, target_path)

            # Success
            logger.info(f"Created PDF {self.filename} with {len(cached_files_with_rotation)} pages")

            # Create BrickInstructions instance to get PDF URL
            instructions = BrickInstructions(self.filename)
            pdf_url = instructions.url()

            self.socket.complete(
                message=f'PDF {self.filename} created with {len(cached_files_with_rotation)} pages - <a href="{pdf_url}" target="_blank" class="btn btn-sm btn-primary ms-2"><i class="ri-external-link-line"></i> Open PDF</a>'
            )

            # Clean up set cache after successful PDF creation (best effort)
            try:
                from .peeron_instructions import clear_set_cache
                deleted_count = clear_set_cache(self.set_number, self.version_number)
                if deleted_count > 0:
                    logger.info(f"[create_pdf] Cleaned up {deleted_count} cache files for set {self.set_number}-{self.version_number}")
            except Exception as e:
                logger.warning(f"[create_pdf] Failed to clean set cache: {e}")

        except Exception as e:
            logger.error(f"Error creating PDF {self.filename}: {e}")
            self.socket.fail(
                message=f"Error creating PDF {self.filename}: {e}"
            )

    # Create PDF from downloaded images
    def _create_pdf_from_images(self, image_paths_and_rotations: list[tuple[str, int]], output_path: str, /) -> None:
        """
        Create a PDF from a list of (image path, rotation in degrees) tuples.

        Pages that fail to render are skipped with a warning; the PDF is
        written to output_path at the end.

        :raises ErrorException: when the fpdf2 library is unavailable.
        """
        try:
            # Import FPDF (should be available from requirements)
            from fpdf import FPDF
        except ImportError:
            raise ErrorException("FPDF library not available. Install with: pip install fpdf2")

        pdf = FPDF()

        for i, (img_path, rotation) in enumerate(image_paths_and_rotations):
            try:
                # Open image and apply rotation if needed
                with Image.open(img_path) as image:
                    # Apply rotation if specified
                    if rotation != 0:
                        # PIL rotation is counter-clockwise, so we negate for clockwise rotation
                        image = image.rotate(-rotation, expand=True)

                    width, height = image.size

                    # Add page with image dimensions (convert pixels to mm)
                    # 1 pixel = 0.264583 mm (assuming 96 DPI)
                    page_width = width * 0.264583
                    page_height = height * 0.264583

                    pdf.add_page(format=(page_width, page_height))

                    # Save rotated image to a temporary file for FPDF.
                    # tempfile is imported at module level (a redundant
                    # function-local import was removed).
                    temp_rotated_path = None
                    if rotation != 0:
                        temp_fd, temp_rotated_path = tempfile.mkstemp(suffix='.jpg', prefix=f'peeron_rotated_{i}_')
                        try:
                            os.close(temp_fd)  # Close file descriptor, we'll use the path
                            image.save(temp_rotated_path, 'JPEG', quality=95)
                            pdf.image(temp_rotated_path, x=0, y=0, w=page_width, h=page_height)
                        finally:
                            # Clean up rotated temp file
                            if temp_rotated_path and os.path.exists(temp_rotated_path):
                                os.remove(temp_rotated_path)
                    else:
                        pdf.image(img_path, x=0, y=0, w=page_width, h=page_height)

                # Update progress
                progress_msg = f"Processing page {i + 1}/{len(image_paths_and_rotations)} into PDF"
                if rotation != 0:
                    progress_msg += f" (rotated {rotation}°)"
                self.socket.progress(message=progress_msg)

            except Exception as e:
                logger.warning(f"Failed to add image {img_path} to PDF: {e}")
                continue

        # Save the PDF
        pdf.output(output_path)

    # Get target file path
    def _get_target_path(self, /) -> str:
        """Get the full path where the PDF should be saved"""
        instructions_folder = os.path.join(
            current_app.static_folder,  # type: ignore
            current_app.config['INSTRUCTIONS_FOLDER']
        )
        return os.path.join(instructions_folder, self.filename)

    # Create BrickInstructions instance for the generated PDF
    def get_instructions(self, /) -> BrickInstructions:
        """Return a BrickInstructions instance for the generated PDF"""
        return BrickInstructions(self.filename)
|
||||
@@ -8,16 +8,16 @@ from shutil import copyfileobj
|
||||
|
||||
from .exceptions import DownloadException
|
||||
if TYPE_CHECKING:
|
||||
from .minifigure import BrickMinifigure
|
||||
from .part import BrickPart
|
||||
from .rebrickable_minifigure import RebrickableMinifigure
|
||||
from .rebrickable_part import RebrickablePart
|
||||
from .rebrickable_set import RebrickableSet
|
||||
|
||||
|
||||
# A set, part or minifigure image from Rebrickable
|
||||
class RebrickableImage(object):
|
||||
set: 'RebrickableSet'
|
||||
minifigure: 'BrickMinifigure | None'
|
||||
part: 'BrickPart | None'
|
||||
minifigure: 'RebrickableMinifigure | None'
|
||||
part: 'RebrickablePart | None'
|
||||
|
||||
extension: str | None
|
||||
|
||||
@@ -26,8 +26,8 @@ class RebrickableImage(object):
|
||||
set: 'RebrickableSet',
|
||||
/,
|
||||
*,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
part: 'BrickPart | None' = None,
|
||||
minifigure: 'RebrickableMinifigure | None' = None,
|
||||
part: 'RebrickablePart | None' = None,
|
||||
):
|
||||
# Save all objects
|
||||
self.set = set
|
||||
@@ -81,16 +81,16 @@ class RebrickableImage(object):
|
||||
# Return the id depending on the objects provided
|
||||
def id(self, /) -> str:
|
||||
if self.part is not None:
|
||||
if self.part.fields.part_img_url_id is None:
|
||||
if self.part.fields.image_id is None:
|
||||
return RebrickableImage.nil_name()
|
||||
else:
|
||||
return self.part.fields.part_img_url_id
|
||||
return self.part.fields.image_id
|
||||
|
||||
if self.minifigure is not None:
|
||||
if self.minifigure.fields.set_img_url is None:
|
||||
if self.minifigure.fields.image is None:
|
||||
return RebrickableImage.nil_minifigure_name()
|
||||
else:
|
||||
return self.minifigure.fields.fig_num
|
||||
return self.minifigure.fields.figure
|
||||
|
||||
return self.set.fields.set
|
||||
|
||||
@@ -105,16 +105,16 @@ class RebrickableImage(object):
|
||||
# Return the url depending on the objects provided
|
||||
def url(self, /) -> str:
|
||||
if self.part is not None:
|
||||
if self.part.fields.part_img_url is None:
|
||||
if self.part.fields.image is None:
|
||||
return current_app.config['REBRICKABLE_IMAGE_NIL']
|
||||
else:
|
||||
return self.part.fields.part_img_url
|
||||
return self.part.fields.image
|
||||
|
||||
if self.minifigure is not None:
|
||||
if self.minifigure.fields.set_img_url is None:
|
||||
if self.minifigure.fields.image is None:
|
||||
return current_app.config['REBRICKABLE_IMAGE_NIL_MINIFIGURE']
|
||||
else:
|
||||
return self.minifigure.fields.set_img_url
|
||||
return self.minifigure.fields.image
|
||||
|
||||
return self.set.fields.image
|
||||
|
||||
|
||||
@@ -0,0 +1,111 @@
|
||||
from sqlite3 import Row
|
||||
from typing import Any, TYPE_CHECKING
|
||||
|
||||
from flask import current_app, url_for
|
||||
|
||||
from .exceptions import ErrorException
|
||||
from .rebrickable_image import RebrickableImage
|
||||
from .record import BrickRecord
|
||||
if TYPE_CHECKING:
|
||||
from .set import BrickSet
|
||||
|
||||
|
||||
# A minifigure from Rebrickable
class RebrickableMinifigure(BrickRecord):
    """
    A minifigure record imported from Rebrickable.

    Optionally tied to a BrickSet; set-scoped SQL parameters are supplemented
    from that set when present.
    """

    # Owning set, if the minifigure was loaded in the context of one
    brickset: 'BrickSet | None'

    # Queries: names of the SQL templates resolved by BrickRecord
    select_query: str = 'rebrickable/minifigure/select'
    insert_query: str = 'rebrickable/minifigure/insert'

    def __init__(
        self,
        /,
        *,
        brickset: 'BrickSet | None' = None,
        record: Row | dict[str, Any] | None = None
    ):
        # Initialize the base record, optionally ingesting an existing row
        super().__init__()

        # Save the brickset
        self.brickset = brickset

        # Ingest the record if it has one
        if record is not None:
            self.ingest(record)

    # Insert the minifigure from Rebrickable
    def insert_rebrickable(self, /) -> None:
        """
        Insert this Rebrickable minifigure into the database and, when remote
        images are disabled, download its image locally.

        Commit is deferred to the caller (commit=False). Raises
        ErrorException when no owning set is attached.
        """
        if self.brickset is None:
            raise ErrorException('Importing a minifigure from Rebrickable outside of a set is not supported')  # noqa: E501

        # Insert the Rebrickable minifigure to the database
        self.insert(
            commit=False,
            no_defer=True,
            override_query=RebrickableMinifigure.insert_query
        )

        # Mirror the image locally when remote images are disabled
        if not current_app.config['USE_REMOTE_IMAGES']:
            RebrickableImage(
                self.brickset,
                minifigure=self,
            ).download()

    # Return a dict with common SQL parameters for a minifigure
    def sql_parameters(self, /) -> dict[str, Any]:
        """Return SQL parameters, supplementing the set id from the brickset."""
        parameters = super().sql_parameters()

        # Supplement from the brickset (only when the base record did not
        # already provide an id)
        if self.brickset is not None and 'id' not in parameters:
            parameters['id'] = self.brickset.fields.id

        return parameters

    # Self url
    def url(self, /) -> str:
        """URL of the minifigure details page."""
        return url_for(
            'minifigure.details',
            figure=self.fields.figure,
        )

    # Compute the url for minifigure image
    def url_for_image(self, /) -> str:
        """
        URL of the minifigure image: a local static file when remote images
        are disabled, otherwise the remote Rebrickable URL (a nil placeholder
        is used when no image is known).
        """
        if not current_app.config['USE_REMOTE_IMAGES']:
            if self.fields.image is None:
                file = RebrickableImage.nil_minifigure_name()
            else:
                file = self.fields.figure

            return RebrickableImage.static_url(file, 'MINIFIGURES_FOLDER')
        else:
            if self.fields.image is None:
                return current_app.config['REBRICKABLE_IMAGE_NIL_MINIFIGURE']
            else:
                return self.fields.image

    # Compute the url for the rebrickable page
    def url_for_rebrickable(self, /) -> str:
        """External Rebrickable page URL, or '' when links are disabled."""
        if current_app.config['REBRICKABLE_LINKS']:
            try:
                return current_app.config['REBRICKABLE_LINK_MINIFIGURE_PATTERN'].format(  # noqa: E501
                    number=self.fields.figure,
                )
            except Exception:
                # Malformed pattern: fall through to the empty string
                pass

        return ''

    # Normalize from Rebrickable
    @staticmethod
    def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]:
        """
        Normalize a Rebrickable API minifigure payload into DB field values.

        NOTE(review): the [5:] slice assumes identifiers like 'fig-123456'
        and drops the 'fig-' prefix plus one extra character (len('fig-') is
        4). With zero-padded numbers int() gives the same value either way,
        but verify the intended offset.
        """
        # Extracting number
        number = int(str(data['set_num'])[5:])

        return {
            'figure': str(data['set_num']),
            'number': int(number),
            'name': str(data['set_name']),
            'quantity': int(data['quantity']),
            'image': data['set_img_url'],
        }
|
||||
@@ -1,85 +0,0 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .minifigure import BrickMinifigure
|
||||
from .rebrickable import Rebrickable
|
||||
from .rebrickable_image import RebrickableImage
|
||||
from .rebrickable_parts import RebrickableParts
|
||||
if TYPE_CHECKING:
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Minifigures from Rebrickable
class RebrickableMinifigures(object):
    """
    Imports the minifigures of one set from Rebrickable.

    Each minifigure is inserted into the database, its image is optionally
    mirrored locally, and its part inventory is loaded in turn.
    """

    socket: 'BrickSocket'
    brickset: 'BrickSet'

    def __init__(self, socket: 'BrickSocket', brickset: 'BrickSet', /):
        # Keep references to the socket and the set being imported
        self.socket = socket
        self.brickset = brickset

    # Import the minifigures from Rebrickable
    def download(self, /) -> None:
        """Fetch, store and post-process all minifigures for the set."""
        set_number = self.brickset.fields.set

        self.socket.auto_progress(
            message='Set {number}: loading minifigures from Rebrickable'.format(  # noqa: E501
                number=set_number,
            ),
            increment_total=True,
        )

        logger.debug('rebrick.lego.get_set_minifigs("{set}")'.format(
            set=set_number,
        ))

        # Query the Rebrickable API for this set's minifigures
        figures = Rebrickable[BrickMinifigure](
            'get_set_minifigs',
            set_number,
            BrickMinifigure,
            socket=self.socket,
            brickset=self.brickset,
        ).list()

        total = len(figures)

        # Handle each minifigure in turn (1-based position for messages)
        for position, figure in enumerate(figures, start=1):
            # Insert into the database
            self.socket.auto_progress(
                message='Set {number}: inserting minifigure {current}/{total} into database'.format(  # noqa: E501
                    number=set_number,
                    current=position,
                    total=total,
                )
            )

            figure.insert(commit=False)

            # Grab the image
            self.socket.progress(
                message='Set {number}: downloading minifigure {current}/{total} image'.format(  # noqa: E501
                    number=set_number,
                    current=position,
                    total=total,
                )
            )

            if not current_app.config['USE_REMOTE_IMAGES']:
                RebrickableImage(
                    self.brickset,
                    minifigure=figure
                ).download()

            # Load the inventory
            RebrickableParts(
                self.socket,
                self.brickset,
                minifigure=figure,
            ).download()
|
||||
@@ -0,0 +1,232 @@
|
||||
import os
|
||||
from sqlite3 import Row
|
||||
from typing import Any, TYPE_CHECKING
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from flask import current_app, url_for
|
||||
|
||||
from .exceptions import ErrorException
|
||||
from .rebrickable_image import RebrickableImage
|
||||
from .record import BrickRecord
|
||||
if TYPE_CHECKING:
|
||||
from .minifigure import BrickMinifigure
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
|
||||
# A part from Rebrickable
|
||||
class RebrickablePart(BrickRecord):
|
||||
socket: 'BrickSocket'
|
||||
brickset: 'BrickSet | None'
|
||||
minifigure: 'BrickMinifigure | None'
|
||||
|
||||
# Queries
|
||||
select_query: str = 'rebrickable/part/select'
|
||||
insert_query: str = 'rebrickable/part/insert'
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
brickset: 'BrickSet | None' = None,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
record: Row | dict[str, Any] | None = None
|
||||
):
|
||||
super().__init__()
|
||||
|
||||
# Save the brickset
|
||||
self.brickset = brickset
|
||||
|
||||
# Save the minifigure
|
||||
self.minifigure = minifigure
|
||||
|
||||
# Ingest the record if it has one
|
||||
if record is not None:
|
||||
self.ingest(record)
|
||||
|
||||
# Insert the part from Rebrickable
|
||||
def insert_rebrickable(self, /) -> None:
|
||||
if self.brickset is None:
|
||||
raise ErrorException('Importing a part from Rebrickable outside of a set is not supported') # noqa: E501
|
||||
|
||||
# Insert the Rebrickable part to the database
|
||||
self.insert(
|
||||
commit=False,
|
||||
no_defer=True,
|
||||
override_query=RebrickablePart.insert_query
|
||||
)
|
||||
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
RebrickableImage(
|
||||
self.brickset,
|
||||
minifigure=self.minifigure,
|
||||
part=self,
|
||||
).download()
|
||||
|
||||
# Return a dict with common SQL parameters for a part
|
||||
def sql_parameters(self, /) -> dict[str, Any]:
|
||||
parameters = super().sql_parameters()
|
||||
|
||||
# Set id
|
||||
if self.brickset is not None:
|
||||
parameters['id'] = self.brickset.fields.id
|
||||
|
||||
# Use the minifigure number if present,
|
||||
if self.minifigure is not None:
|
||||
parameters['figure'] = self.minifigure.fields.figure
|
||||
else:
|
||||
parameters['figure'] = None
|
||||
|
||||
return parameters
|
||||
|
||||
# Self url
|
||||
def url(self, /) -> str:
|
||||
return url_for(
|
||||
'part.details',
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
)
|
||||
|
||||
# Compute the url for the bricklink page
|
||||
def url_for_bricklink(self, /) -> str:
|
||||
if current_app.config['BRICKLINK_LINKS']:
|
||||
try:
|
||||
# Use BrickLink part number if available and not None/empty, otherwise fall back to Rebrickable part
|
||||
bricklink_part = getattr(self.fields, 'bricklink_part_num', None)
|
||||
part_param = bricklink_part if bricklink_part else self.fields.part
|
||||
|
||||
# Use BrickLink color ID if available and not None, otherwise fall back to Rebrickable color
|
||||
bricklink_color = getattr(self.fields, 'bricklink_color_id', None)
|
||||
color_param = bricklink_color if bricklink_color is not None else self.fields.color
|
||||
# print(f'BrickLink URL parameters: part={part_param}, color={color_param}') # Debugging line, can be removed later
|
||||
return current_app.config['BRICKLINK_LINK_PART_PATTERN'].format( # noqa: E501
|
||||
part=part_param,
|
||||
color=color_param,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return ''
|
||||
|
||||
# Compute the url for the part image
|
||||
def url_for_image(self, /) -> str:
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
if self.fields.image is None:
|
||||
file = RebrickableImage.nil_name()
|
||||
else:
|
||||
file = self.fields.image_id
|
||||
|
||||
return RebrickableImage.static_url(file, 'PARTS_FOLDER')
|
||||
else:
|
||||
if self.fields.image is None:
|
||||
return current_app.config['REBRICKABLE_IMAGE_NIL']
|
||||
else:
|
||||
return self.fields.image
|
||||
|
||||
# Compute the url for the original of the printed part
|
||||
def url_for_print(self, /) -> str:
|
||||
if self.fields.print is not None:
|
||||
return url_for(
|
||||
'part.details',
|
||||
part=self.fields.print,
|
||||
color=self.fields.color,
|
||||
)
|
||||
else:
|
||||
return ''
|
||||
|
||||
# Compute the url for the rebrickable page
|
||||
def url_for_rebrickable(self, /) -> str:
|
||||
if current_app.config['REBRICKABLE_LINKS']:
|
||||
try:
|
||||
if self.fields.url is not None:
|
||||
# The URL does not contain color info...
|
||||
return '{url}{color}'.format(
|
||||
url=self.fields.url,
|
||||
color=self.fields.color
|
||||
)
|
||||
else:
|
||||
return current_app.config['REBRICKABLE_LINK_PART_PATTERN'].format( # noqa: E501
|
||||
part=self.fields.part,
|
||||
color=self.fields.color,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return ''
|
||||
|
||||
# Normalize from Rebrickable
|
||||
@staticmethod
|
||||
def from_rebrickable(
|
||||
data: dict[str, Any],
|
||||
/,
|
||||
*,
|
||||
brickset: 'BrickSet | None' = None,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
**_,
|
||||
) -> dict[str, Any]:
|
||||
record = {
|
||||
'id': None,
|
||||
'figure': None,
|
||||
'part': data['part']['part_num'],
|
||||
'color': data['color']['id'],
|
||||
'spare': data['is_spare'],
|
||||
'quantity': data['quantity'],
|
||||
'rebrickable_inventory': data['id'],
|
||||
'element': data['element_id'],
|
||||
'color_id': data['color']['id'],
|
||||
'color_name': data['color']['name'],
|
||||
'color_rgb': data['color']['rgb'],
|
||||
'color_transparent': data['color']['is_trans'],
|
||||
'bricklink_color_id': None,
|
||||
'bricklink_color_name': None,
|
||||
'bricklink_part_num': None,
|
||||
'name': data['part']['name'],
|
||||
'category': data['part']['part_cat_id'],
|
||||
'image': data['part']['part_img_url'],
|
||||
'image_id': None,
|
||||
'url': data['part']['part_url'],
|
||||
'print': data['part']['print_of']
|
||||
}
|
||||
|
||||
# Extract BrickLink color info if available in external_ids
|
||||
if 'color' in data and 'external_ids' in data['color']:
|
||||
external_ids = data['color']['external_ids']
|
||||
if 'BrickLink' in external_ids and external_ids['BrickLink']:
|
||||
bricklink_data = external_ids['BrickLink']
|
||||
|
||||
# Extract BrickLink color ID and name from the nested structure
|
||||
if isinstance(bricklink_data, dict):
|
||||
if 'ext_ids' in bricklink_data and bricklink_data['ext_ids']:
|
||||
record['bricklink_color_id'] = bricklink_data['ext_ids'][0]
|
||||
|
||||
if 'ext_descrs' in bricklink_data and bricklink_data['ext_descrs']:
|
||||
# ext_descrs is a list of lists, get the first description from the first list
|
||||
if len(bricklink_data['ext_descrs']) > 0 and len(bricklink_data['ext_descrs'][0]) > 0:
|
||||
record['bricklink_color_name'] = bricklink_data['ext_descrs'][0][0]
|
||||
|
||||
# Extract BrickLink part number if available
|
||||
if 'part' in data and 'external_ids' in data['part']:
|
||||
part_external_ids = data['part']['external_ids']
|
||||
if 'BrickLink' in part_external_ids and part_external_ids['BrickLink']:
|
||||
bricklink_parts = part_external_ids['BrickLink']
|
||||
if isinstance(bricklink_parts, list) and len(bricklink_parts) > 0:
|
||||
record['bricklink_part_num'] = bricklink_parts[0]
|
||||
|
||||
if brickset is not None:
|
||||
record['id'] = brickset.fields.id
|
||||
|
||||
if minifigure is not None:
|
||||
record['figure'] = minifigure.fields.figure
|
||||
|
||||
# Extract the file name
|
||||
if record['image'] is not None:
|
||||
image_id, _ = os.path.splitext(
|
||||
os.path.basename(
|
||||
urlparse(record['image']).path
|
||||
)
|
||||
)
|
||||
|
||||
if image_id is not None or image_id != '':
|
||||
record['image_id'] = image_id
|
||||
|
||||
return record
|
||||
@@ -1,113 +0,0 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .part import BrickPart
|
||||
from .rebrickable import Rebrickable
|
||||
from .rebrickable_image import RebrickableImage
|
||||
if TYPE_CHECKING:
|
||||
from .minifigure import BrickMinifigure
|
||||
from .set import BrickSet
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# A list of parts from Rebrickable
|
||||
class RebrickableParts(object):
|
||||
socket: 'BrickSocket'
|
||||
brickset: 'BrickSet'
|
||||
minifigure: 'BrickMinifigure | None'
|
||||
|
||||
number: str
|
||||
kind: str
|
||||
method: str
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
socket: 'BrickSocket',
|
||||
brickset: 'BrickSet',
|
||||
/,
|
||||
*,
|
||||
minifigure: 'BrickMinifigure | None' = None,
|
||||
):
|
||||
# Save the socket
|
||||
self.socket = socket
|
||||
|
||||
# Save the objects
|
||||
self.brickset = brickset
|
||||
self.minifigure = minifigure
|
||||
|
||||
if self.minifigure is not None:
|
||||
self.number = self.minifigure.fields.fig_num
|
||||
self.kind = 'Minifigure'
|
||||
self.method = 'get_minifig_elements'
|
||||
else:
|
||||
self.number = self.brickset.fields.set
|
||||
self.kind = 'Set'
|
||||
self.method = 'get_set_elements'
|
||||
|
||||
# Import the parts from Rebrickable
|
||||
def download(self, /) -> None:
|
||||
self.socket.auto_progress(
|
||||
message='{kind} {number}: loading parts inventory from Rebrickable'.format( # noqa: E501
|
||||
kind=self.kind,
|
||||
number=self.number,
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
logger.debug('rebrick.lego.{method}("{number}")'.format(
|
||||
method=self.method,
|
||||
number=self.number,
|
||||
))
|
||||
|
||||
inventory = Rebrickable[BrickPart](
|
||||
self.method,
|
||||
self.number,
|
||||
BrickPart,
|
||||
socket=self.socket,
|
||||
brickset=self.brickset,
|
||||
minifigure=self.minifigure,
|
||||
).list()
|
||||
|
||||
# Process each part
|
||||
total = len(inventory)
|
||||
for index, part in enumerate(inventory):
|
||||
# Skip spare parts
|
||||
if (
|
||||
current_app.config['SKIP_SPARE_PARTS'] and
|
||||
part.fields.is_spare
|
||||
):
|
||||
continue
|
||||
|
||||
# Insert into the database
|
||||
self.socket.auto_progress(
|
||||
message='{kind} {number}: inserting part {current}/{total} into database'.format( # noqa: E501
|
||||
kind=self.kind,
|
||||
number=self.number,
|
||||
current=index+1,
|
||||
total=total,
|
||||
)
|
||||
)
|
||||
|
||||
# Insert into database
|
||||
part.insert(commit=False)
|
||||
|
||||
# Grab the image
|
||||
self.socket.progress(
|
||||
message='{kind} {number}: downloading part {current}/{total} image'.format( # noqa: E501
|
||||
kind=self.kind,
|
||||
number=self.number,
|
||||
current=index+1,
|
||||
total=total,
|
||||
)
|
||||
)
|
||||
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
RebrickableImage(
|
||||
self.brickset,
|
||||
minifigure=self.minifigure,
|
||||
part=part,
|
||||
).download()
|
||||
@@ -1,9 +1,9 @@
|
||||
import logging
|
||||
from sqlite3 import Row
|
||||
import traceback
|
||||
from typing import Any, TYPE_CHECKING
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
|
||||
from flask import current_app
|
||||
from flask import current_app, url_for
|
||||
|
||||
from .exceptions import ErrorException, NotFoundException
|
||||
from .instructions import BrickInstructions
|
||||
@@ -21,7 +21,6 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
# A set from Rebrickable
|
||||
class RebrickableSet(BrickRecord):
|
||||
socket: 'BrickSocket'
|
||||
theme: 'BrickTheme'
|
||||
instructions: list[BrickInstructions]
|
||||
|
||||
@@ -36,7 +35,6 @@ class RebrickableSet(BrickRecord):
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
socket: 'BrickSocket | None' = None,
|
||||
record: Row | dict[str, Any] | None = None
|
||||
):
|
||||
super().__init__()
|
||||
@@ -44,26 +42,21 @@ class RebrickableSet(BrickRecord):
|
||||
# Placeholders
|
||||
self.instructions = []
|
||||
|
||||
# Save the socket
|
||||
if socket is not None:
|
||||
self.socket = socket
|
||||
|
||||
# Ingest the record if it has one
|
||||
if record is not None:
|
||||
self.ingest(record)
|
||||
|
||||
# Import the set from Rebrickable
|
||||
def download_rebrickable(self, /) -> None:
|
||||
# Insert the set from Rebrickable
|
||||
def insert_rebrickable(self, /) -> None:
|
||||
# Insert the Rebrickable set to the database
|
||||
rows, _ = self.insert(
|
||||
self.insert(
|
||||
commit=False,
|
||||
no_defer=True,
|
||||
override_query=RebrickableSet.insert_query
|
||||
)
|
||||
|
||||
if rows > 0:
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
RebrickableImage(self).download()
|
||||
if not current_app.config['USE_REMOTE_IMAGES']:
|
||||
RebrickableImage(self).download()
|
||||
|
||||
# Ingest a set
|
||||
def ingest(self, record: Row | dict[str, Any], /):
|
||||
@@ -88,20 +81,21 @@ class RebrickableSet(BrickRecord):
|
||||
# Load the set from Rebrickable
|
||||
def load(
|
||||
self,
|
||||
socket: 'BrickSocket',
|
||||
data: dict[str, Any],
|
||||
/,
|
||||
*,
|
||||
from_download=False,
|
||||
) -> bool:
|
||||
# Reset the progress
|
||||
self.socket.progress_count = 0
|
||||
self.socket.progress_total = 2
|
||||
socket.progress_count = 0
|
||||
socket.progress_total = 2
|
||||
|
||||
try:
|
||||
self.socket.auto_progress(message='Parsing set number')
|
||||
socket.auto_progress(message='Parsing set number')
|
||||
set = parse_set(str(data['set']))
|
||||
|
||||
self.socket.auto_progress(
|
||||
socket.auto_progress(
|
||||
message='Set {set}: loading from Rebrickable'.format(
|
||||
set=set,
|
||||
),
|
||||
@@ -118,12 +112,12 @@ class RebrickableSet(BrickRecord):
|
||||
instance=self,
|
||||
).get()
|
||||
|
||||
self.socket.emit('SET_LOADED', self.short(
|
||||
socket.emit('SET_LOADED', self.short(
|
||||
from_download=from_download
|
||||
))
|
||||
|
||||
if not from_download:
|
||||
self.socket.complete(
|
||||
socket.complete(
|
||||
message='Set {set}: loaded from Rebrickable'.format(
|
||||
set=self.fields.set
|
||||
)
|
||||
@@ -132,7 +126,7 @@ class RebrickableSet(BrickRecord):
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
self.socket.fail(
|
||||
socket.fail(
|
||||
message='Could not load the set from Rebrickable: {error}. Data: {data}'.format( # noqa: E501
|
||||
error=str(e),
|
||||
data=data,
|
||||
@@ -144,6 +138,21 @@ class RebrickableSet(BrickRecord):
|
||||
|
||||
return False
|
||||
|
||||
# Select a specific set (with a set)
|
||||
def select_specific(self, set: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.set = set
|
||||
|
||||
# Load from database
|
||||
if not self.select():
|
||||
raise NotFoundException(
|
||||
'Set with set {set} was not found in the database'.format(
|
||||
set=self.fields.set,
|
||||
),
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
# Return a short form of the Rebrickable set
|
||||
def short(self, /, *, from_download: bool = False) -> dict[str, Any]:
|
||||
return {
|
||||
@@ -170,6 +179,19 @@ class RebrickableSet(BrickRecord):
|
||||
|
||||
return ''
|
||||
|
||||
# Compute the url for the bricklink page
|
||||
def url_for_bricklink(self, /) -> str:
|
||||
if current_app.config['BRICKLINK_LINKS']:
|
||||
return current_app.config['BRICKLINK_LINK_SET_PATTERN'].format(
|
||||
set_num=self.fields.set
|
||||
)
|
||||
|
||||
return ''
|
||||
|
||||
# Compute the url for the refresh button
|
||||
def url_for_refresh(self, /) -> str:
|
||||
return url_for('set.refresh', set=self.fields.set)
|
||||
|
||||
# Normalize from Rebrickable
|
||||
@staticmethod
|
||||
def from_rebrickable(data: dict[str, Any], /, **_) -> dict[str, Any]:
|
||||
|
||||
@@ -9,13 +9,21 @@ class RebrickableSetList(BrickRecordList[RebrickableSet]):
|
||||
|
||||
# Queries
|
||||
select_query: str = 'rebrickable/set/list'
|
||||
refresh_query: str = 'rebrickable/set/need_refresh'
|
||||
|
||||
# Implementation of abstract list method
|
||||
def list(self, /, *, override_query: str | None = None, **context) -> None:
|
||||
# Load the sets from the database
|
||||
for record in self.select(override_query=override_query, **context):
|
||||
rebrickable_set = RebrickableSet(record=record)
|
||||
self.records.append(rebrickable_set)
|
||||
|
||||
# All the sets
|
||||
def all(self, /) -> Self:
|
||||
# Load the sets from the database
|
||||
for record in self.select():
|
||||
rebrickable_set = RebrickableSet(record=record)
|
||||
|
||||
self.records.append(rebrickable_set)
|
||||
|
||||
self.list()
|
||||
return self
|
||||
|
||||
# Sets needing refresh
|
||||
def need_refresh(self, /) -> Self:
|
||||
self.list(override_query=self.refresh_query)
|
||||
return self
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from sqlite3 import Row
|
||||
from typing import Any, ItemsView, Tuple
|
||||
from typing import Any, ItemsView
|
||||
|
||||
from .fields import BrickRecordFields
|
||||
from .sql import BrickSQL
|
||||
@@ -31,14 +31,14 @@ class BrickRecord(object):
|
||||
commit=True,
|
||||
no_defer=False,
|
||||
override_query: str | None = None
|
||||
) -> Tuple[int, str]:
|
||||
) -> None:
|
||||
if override_query:
|
||||
query = override_query
|
||||
else:
|
||||
query = self.insert_query
|
||||
|
||||
database = BrickSQL()
|
||||
rows, q = database.execute(
|
||||
database.execute(
|
||||
query,
|
||||
parameters=self.sql_parameters(),
|
||||
defer=not commit and not no_defer,
|
||||
@@ -47,8 +47,6 @@ class BrickRecord(object):
|
||||
if commit:
|
||||
database.commit()
|
||||
|
||||
return rows, q
|
||||
|
||||
# Shorthand to field items
|
||||
def items(self, /) -> ItemsView[str, Any]:
|
||||
return self.fields.__dict__.items()
|
||||
|
||||
+100
-5
@@ -1,5 +1,6 @@
|
||||
import re
|
||||
from sqlite3 import Row
|
||||
from typing import Any, Generator, Generic, ItemsView, TypeVar, TYPE_CHECKING
|
||||
from typing import Any, Generator, Generic, ItemsView, Self, TypeVar, TYPE_CHECKING
|
||||
|
||||
from .fields import BrickRecordFields
|
||||
from .sql import BrickSQL
|
||||
@@ -8,16 +9,26 @@ if TYPE_CHECKING:
|
||||
from .part import BrickPart
|
||||
from .rebrickable_set import RebrickableSet
|
||||
from .set import BrickSet
|
||||
from .set_checkbox import BrickSetCheckbox
|
||||
from .set_owner import BrickSetOwner
|
||||
from .set_purchase_location import BrickSetPurchaseLocation
|
||||
from .set_status import BrickSetStatus
|
||||
from .set_storage import BrickSetStorage
|
||||
from .set_tag import BrickSetTag
|
||||
from .wish import BrickWish
|
||||
from .wish_owner import BrickWishOwner
|
||||
|
||||
T = TypeVar(
|
||||
'T',
|
||||
'BrickSet',
|
||||
'BrickSetCheckbox',
|
||||
'BrickPart',
|
||||
'BrickMinifigure',
|
||||
'BrickPart',
|
||||
'BrickSet',
|
||||
'BrickSetOwner',
|
||||
'BrickSetPurchaseLocation',
|
||||
'BrickSetStatus',
|
||||
'BrickSetStorage',
|
||||
'BrickSetTag',
|
||||
'BrickWish',
|
||||
'BrickWishOwner',
|
||||
'RebrickableSet'
|
||||
)
|
||||
|
||||
@@ -62,6 +73,90 @@ class BrickRecordList(Generic[T]):
|
||||
**context
|
||||
)
|
||||
|
||||
# Generic pagination method for all record lists
|
||||
def paginate(
|
||||
self,
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
sort_field: str | None = None,
|
||||
sort_order: str = 'asc',
|
||||
count_query: str | None = None,
|
||||
list_query: str | None = None,
|
||||
field_mapping: dict[str, str] | None = None,
|
||||
**filter_context: Any
|
||||
) -> tuple['Self', int]:
|
||||
"""Generic pagination implementation for all record lists"""
|
||||
from .sql import BrickSQL
|
||||
|
||||
# Use provided queries or fall back to defaults
|
||||
list_query = list_query or getattr(self, 'all_query', None)
|
||||
if not list_query:
|
||||
raise NotImplementedError("Subclass must define all_query")
|
||||
|
||||
# Calculate offset
|
||||
offset = (page - 1) * per_page
|
||||
|
||||
# Get total count by wrapping the main query
|
||||
if count_query:
|
||||
# Use provided count query
|
||||
count_result = BrickSQL().fetchone(count_query, **filter_context)
|
||||
total_count = count_result['total_count'] if count_result else 0
|
||||
else:
|
||||
# Generate count by wrapping the main query (without ORDER BY, LIMIT, OFFSET)
|
||||
count_context = {k: v for k, v in filter_context.items()
|
||||
if k not in ['order', 'limit', 'offset']}
|
||||
|
||||
# Get the main query SQL without pagination clauses
|
||||
main_sql = BrickSQL().load_query(list_query, **count_context)
|
||||
|
||||
# Remove ORDER BY, LIMIT, OFFSET clauses for counting
|
||||
# Remove ORDER BY clause and everything after it that's not part of subqueries
|
||||
count_sql = re.sub(r'\s+ORDER\s+BY\s+[^)]*?(\s+LIMIT|\s+OFFSET|$)', r'\1', main_sql, flags=re.IGNORECASE)
|
||||
# Remove LIMIT and OFFSET
|
||||
count_sql = re.sub(r'\s+LIMIT\s+\d+', '', count_sql, flags=re.IGNORECASE)
|
||||
count_sql = re.sub(r'\s+OFFSET\s+\d+', '', count_sql, flags=re.IGNORECASE)
|
||||
|
||||
# Wrap in COUNT(*)
|
||||
wrapped_sql = f"SELECT COUNT(*) as total_count FROM ({count_sql.strip()})"
|
||||
|
||||
count_result = BrickSQL().raw_execute(wrapped_sql, {}).fetchone()
|
||||
total_count = count_result['total_count'] if count_result else 0
|
||||
|
||||
# Prepare sort order
|
||||
order_clause = None
|
||||
if sort_field and field_mapping and sort_field in field_mapping:
|
||||
sql_field = field_mapping[sort_field]
|
||||
direction = 'DESC' if sort_order.lower() == 'desc' else 'ASC'
|
||||
order_clause = f'{sql_field} {direction}'
|
||||
|
||||
# Build pagination context
|
||||
pagination_context = {
|
||||
'limit': per_page,
|
||||
'offset': offset,
|
||||
'order': order_clause or getattr(self, 'order', None),
|
||||
**filter_context
|
||||
}
|
||||
|
||||
# Load paginated results using the existing list() method
|
||||
# Check if this is a set list that needs do_theme parameter
|
||||
if hasattr(self, 'themes'): # Only BrickSetList has this attribute
|
||||
self.list(override_query=list_query, do_theme=True, **pagination_context)
|
||||
else:
|
||||
self.list(override_query=list_query, **pagination_context)
|
||||
|
||||
return self, total_count
|
||||
|
||||
# Base method that subclasses can override
|
||||
def list(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
override_query: str | None = None,
|
||||
**context: Any,
|
||||
) -> None:
|
||||
"""Load records from database - should be implemented by subclasses that use pagination"""
|
||||
raise NotImplementedError("Subclass must implement list() method")
|
||||
|
||||
# Generic SQL parameters from fields
|
||||
def sql_parameters(self, /) -> dict[str, Any]:
|
||||
parameters: dict[str, Any] = {}
|
||||
|
||||
+23
-3
@@ -1,7 +1,12 @@
|
||||
from .instructions_list import BrickInstructionsList
|
||||
from .retired_list import BrickRetiredList
|
||||
from .set_checkbox_list import BrickSetCheckboxList
|
||||
from .set_owner_list import BrickSetOwnerList
|
||||
from .set_purchase_location_list import BrickSetPurchaseLocationList
|
||||
from .set_status_list import BrickSetStatusList
|
||||
from .set_storage_list import BrickSetStorageList
|
||||
from .set_tag_list import BrickSetTagList
|
||||
from .theme_list import BrickThemeList
|
||||
from .wish_owner_list import BrickWishOwnerList
|
||||
|
||||
|
||||
# Reload everything related to a database after an operation
|
||||
@@ -11,13 +16,28 @@ def reload() -> None:
|
||||
# Reload the instructions
|
||||
BrickInstructionsList(force=True)
|
||||
|
||||
# Reload the checkboxes
|
||||
BrickSetCheckboxList(force=True)
|
||||
# Reload the set owners
|
||||
BrickSetOwnerList.new(force=True)
|
||||
|
||||
# Reload the set purchase locations
|
||||
BrickSetPurchaseLocationList.new(force=True)
|
||||
|
||||
# Reload the set statuses
|
||||
BrickSetStatusList.new(force=True)
|
||||
|
||||
# Reload the set storages
|
||||
BrickSetStorageList.new(force=True)
|
||||
|
||||
# Reload the set tags
|
||||
BrickSetTagList.new(force=True)
|
||||
|
||||
# Reload retired sets
|
||||
BrickRetiredList(force=True)
|
||||
|
||||
# Reload themes
|
||||
BrickThemeList(force=True)
|
||||
|
||||
# Reload the wish owners
|
||||
BrickWishOwnerList.new(force=True)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
+227
-61
@@ -1,19 +1,23 @@
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import traceback
|
||||
from typing import Any, Self
|
||||
from typing import Any, Self, TYPE_CHECKING
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import url_for
|
||||
from flask import current_app, url_for
|
||||
|
||||
from .exceptions import DatabaseException, NotFoundException
|
||||
from .exceptions import NotFoundException, DatabaseException, ErrorException
|
||||
from .minifigure_list import BrickMinifigureList
|
||||
from .part_list import BrickPartList
|
||||
from .rebrickable_minifigures import RebrickableMinifigures
|
||||
from .rebrickable_parts import RebrickableParts
|
||||
from .rebrickable_set import RebrickableSet
|
||||
from .set_checkbox import BrickSetCheckbox
|
||||
from .set_checkbox_list import BrickSetCheckboxList
|
||||
from .set_owner_list import BrickSetOwnerList
|
||||
from .set_purchase_location_list import BrickSetPurchaseLocationList
|
||||
from .set_status_list import BrickSetStatusList
|
||||
from .set_storage_list import BrickSetStorageList
|
||||
from .set_tag_list import BrickSetTagList
|
||||
from .sql import BrickSQL
|
||||
if TYPE_CHECKING:
|
||||
from .socket import BrickSocket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -24,6 +28,8 @@ class BrickSet(RebrickableSet):
|
||||
select_query: str = 'set/select/full'
|
||||
light_query: str = 'set/select/light'
|
||||
insert_query: str = 'set/insert'
|
||||
update_purchase_date_query: str = 'set/update/purchase_date'
|
||||
update_purchase_price_query: str = 'set/update/purchase_price'
|
||||
|
||||
# Delete a set
|
||||
def delete(self, /) -> None:
|
||||
@@ -33,81 +39,167 @@ class BrickSet(RebrickableSet):
|
||||
)
|
||||
|
||||
# Import a set into the database
|
||||
def download(self, data: dict[str, Any], /) -> None:
|
||||
def download(self, socket: 'BrickSocket', data: dict[str, Any], /) -> bool:
|
||||
# Load the set
|
||||
if not self.load(data, from_download=True):
|
||||
return
|
||||
if not self.load(socket, data, from_download=True):
|
||||
return False
|
||||
|
||||
try:
|
||||
# Insert into the database
|
||||
self.socket.auto_progress(
|
||||
message='Set {number}: inserting into database'.format(
|
||||
number=self.fields.set
|
||||
socket.auto_progress(
|
||||
message='Set {set}: inserting into database'.format(
|
||||
set=self.fields.set
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
# Grabbing the refresh flag
|
||||
refresh: bool = bool(data.get('refresh', False))
|
||||
|
||||
# Generate an UUID for self
|
||||
self.fields.id = str(uuid4())
|
||||
|
||||
# Insert into database
|
||||
self.insert(commit=False)
|
||||
if not refresh:
|
||||
# Save the storage
|
||||
storage = BrickSetStorageList.get(
|
||||
data.get('storage', ''),
|
||||
allow_none=True
|
||||
)
|
||||
self.fields.storage = storage.fields.id
|
||||
|
||||
# Execute the parent download method
|
||||
self.download_rebrickable()
|
||||
# Save the purchase location
|
||||
purchase_location = BrickSetPurchaseLocationList.get(
|
||||
data.get('purchase_location', ''),
|
||||
allow_none=True
|
||||
)
|
||||
self.fields.purchase_location = purchase_location.fields.id
|
||||
|
||||
# Insert into database
|
||||
self.insert(commit=False)
|
||||
|
||||
# Save the owners
|
||||
owners: list[str] = list(data.get('owners', []))
|
||||
|
||||
for id in owners:
|
||||
owner = BrickSetOwnerList.get(id)
|
||||
owner.update_set_state(self, state=True)
|
||||
|
||||
# Save the tags
|
||||
tags: list[str] = list(data.get('tags', []))
|
||||
|
||||
for id in tags:
|
||||
tag = BrickSetTagList.get(id)
|
||||
tag.update_set_state(self, state=True)
|
||||
|
||||
# Insert the rebrickable set into database
|
||||
self.insert_rebrickable()
|
||||
|
||||
# Load the inventory
|
||||
RebrickableParts(self.socket, self).download()
|
||||
if not BrickPartList.download(socket, self, refresh=refresh):
|
||||
return False
|
||||
|
||||
# Load the minifigures
|
||||
RebrickableMinifigures(self.socket, self).download()
|
||||
if not BrickMinifigureList.download(socket, self, refresh=refresh):
|
||||
return False
|
||||
|
||||
# Commit the transaction to the database
|
||||
self.socket.auto_progress(
|
||||
message='Set {number}: writing to the database'.format(
|
||||
number=self.fields.set
|
||||
socket.auto_progress(
|
||||
message='Set {set}: writing to the database'.format(
|
||||
set=self.fields.set
|
||||
),
|
||||
increment_total=True,
|
||||
)
|
||||
|
||||
BrickSQL().commit()
|
||||
|
||||
# Info
|
||||
logger.info('Set {number}: imported (id: {id})'.format(
|
||||
number=self.fields.set,
|
||||
id=self.fields.id,
|
||||
))
|
||||
if refresh:
|
||||
# Info
|
||||
logger.info('Set {set}: imported (id: {id})'.format(
|
||||
set=self.fields.set,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
# Complete
|
||||
self.socket.complete(
|
||||
message='Set {number}: imported (<a href="{url}">Go to the set</a>)'.format( # noqa: E501
|
||||
number=self.fields.set,
|
||||
url=self.url()
|
||||
),
|
||||
download=True
|
||||
)
|
||||
# Complete
|
||||
socket.complete(
|
||||
message='Set {set}: refreshed'.format( # noqa: E501
|
||||
set=self.fields.set,
|
||||
),
|
||||
download=True
|
||||
)
|
||||
else:
|
||||
# Info
|
||||
logger.info('Set {set}: refreshed'.format(
|
||||
set=self.fields.set,
|
||||
))
|
||||
|
||||
# Complete
|
||||
socket.complete(
|
||||
message='Set {set}: imported (<a href="{url}">Go to the set</a>)'.format( # noqa: E501
|
||||
set=self.fields.set,
|
||||
url=self.url()
|
||||
),
|
||||
download=True
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.socket.fail(
|
||||
message='Error while importing set {number}: {error}'.format(
|
||||
number=self.fields.set,
|
||||
socket.fail(
|
||||
message='Error while importing set {set}: {error}'.format(
|
||||
set=self.fields.set,
|
||||
error=e,
|
||||
)
|
||||
)
|
||||
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
# Insert a Rebrickable set
|
||||
def insert_rebrickable(self, /) -> None:
|
||||
self.insert()
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# Purchase date
|
||||
def purchase_date(self, /, *, standard: bool = False) -> str:
|
||||
if self.fields.purchase_date is not None:
|
||||
time = datetime.fromtimestamp(self.fields.purchase_date)
|
||||
|
||||
if standard:
|
||||
return time.strftime('%Y/%m/%d')
|
||||
else:
|
||||
return time.strftime(
|
||||
current_app.config['PURCHASE_DATE_FORMAT']
|
||||
)
|
||||
else:
|
||||
return ''
|
||||
|
||||
# Purchase date max formatted for consolidated sets
|
||||
def purchase_date_max_formatted(self, /, *, standard: bool = False) -> str:
|
||||
if hasattr(self.fields, 'purchase_date_max') and self.fields.purchase_date_max is not None:
|
||||
time = datetime.fromtimestamp(self.fields.purchase_date_max)
|
||||
|
||||
if standard:
|
||||
return time.strftime('%Y/%m/%d')
|
||||
else:
|
||||
return time.strftime(
|
||||
current_app.config['PURCHASE_DATE_FORMAT']
|
||||
)
|
||||
else:
|
||||
return ''
|
||||
|
||||
# Purchase price with currency
|
||||
def purchase_price(self, /) -> str:
|
||||
if self.fields.purchase_price is not None:
|
||||
return '{price}{currency}'.format(
|
||||
price=self.fields.purchase_price,
|
||||
currency=current_app.config['PURCHASE_CURRENCY']
|
||||
)
|
||||
else:
|
||||
return ''
|
||||
|
||||
# Minifigures
|
||||
def minifigures(self, /) -> BrickMinifigureList:
|
||||
return BrickMinifigureList().load(self)
|
||||
return BrickMinifigureList().from_set(self)
|
||||
|
||||
# Parts
|
||||
def parts(self, /) -> BrickPartList:
|
||||
return BrickPartList().load(self)
|
||||
return BrickPartList().list_specific(self)
|
||||
|
||||
# Select a light set (with an id)
|
||||
def select_light(self, id: str, /) -> Self:
|
||||
@@ -131,7 +223,9 @@ class BrickSet(RebrickableSet):
|
||||
|
||||
# Load from database
|
||||
if not self.select(
|
||||
statuses=BrickSetCheckboxList().as_columns(solo=True)
|
||||
owners=BrickSetOwnerList.as_columns(),
|
||||
statuses=BrickSetStatusList.as_columns(all=True),
|
||||
tags=BrickSetTagList.as_columns(),
|
||||
):
|
||||
raise NotFoundException(
|
||||
'Set with ID {id} was not found in the database'.format(
|
||||
@@ -141,30 +235,80 @@ class BrickSet(RebrickableSet):
|
||||
|
||||
return self
|
||||
|
||||
# Update a status
|
||||
def update_status(
|
||||
self,
|
||||
checkbox: BrickSetCheckbox,
|
||||
status: bool,
|
||||
/
|
||||
) -> None:
|
||||
parameters = self.sql_parameters()
|
||||
parameters['status'] = status
|
||||
# Update the purchase date
|
||||
def update_purchase_date(self, json: Any | None, /) -> Any:
|
||||
value = json.get('value', None) # type: ignore
|
||||
|
||||
try:
|
||||
if value == '':
|
||||
value = None
|
||||
|
||||
if value is not None:
|
||||
value = datetime.strptime(value, '%Y/%m/%d').timestamp()
|
||||
except Exception:
|
||||
raise ErrorException('{value} is not a date'.format(
|
||||
value=value,
|
||||
))
|
||||
|
||||
self.fields.purchase_date = value
|
||||
|
||||
# Update the status
|
||||
rows, _ = BrickSQL().execute_and_commit(
|
||||
'set/update/status',
|
||||
parameters=parameters,
|
||||
name=checkbox.as_column(),
|
||||
self.update_purchase_date_query,
|
||||
parameters=self.sql_parameters()
|
||||
)
|
||||
|
||||
if rows != 1:
|
||||
raise DatabaseException('Could not update the status "{status}" for set {number} ({id})'.format( # noqa: E501
|
||||
status=checkbox.fields.name,
|
||||
number=self.fields.set,
|
||||
raise DatabaseException('Could not update the purchase date for set {set} ({id})'.format( # noqa: E501
|
||||
set=self.fields.set,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
# Info
|
||||
logger.info('Purchase date changed to "{value}" for set {set} ({id})'.format( # noqa: E501
|
||||
value=value,
|
||||
set=self.fields.set,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
return value
|
||||
|
||||
# Update the purchase price
|
||||
def update_purchase_price(self, json: Any | None, /) -> Any:
|
||||
value = json.get('value', None) # type: ignore
|
||||
|
||||
try:
|
||||
if value == '':
|
||||
value = None
|
||||
|
||||
if value is not None:
|
||||
value = float(value)
|
||||
except Exception:
|
||||
raise ErrorException('{value} is not a number or empty'.format(
|
||||
value=value,
|
||||
))
|
||||
|
||||
self.fields.purchase_price = value
|
||||
|
||||
rows, _ = BrickSQL().execute_and_commit(
|
||||
self.update_purchase_price_query,
|
||||
parameters=self.sql_parameters()
|
||||
)
|
||||
|
||||
if rows != 1:
|
||||
raise DatabaseException('Could not update the purchase price for set {set} ({id})'.format( # noqa: E501
|
||||
set=self.fields.set,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
# Info
|
||||
logger.info('Purchase price changed to "{value}" for set {set} ({id})'.format( # noqa: E501
|
||||
value=value,
|
||||
set=self.fields.set,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
return value
|
||||
|
||||
# Self url
|
||||
def url(self, /) -> str:
|
||||
return url_for('set.details', id=self.fields.id)
|
||||
@@ -179,7 +323,10 @@ class BrickSet(RebrickableSet):
|
||||
|
||||
# Compute the url for the set instructions
|
||||
def url_for_instructions(self, /) -> str:
|
||||
if len(self.instructions):
|
||||
if (
|
||||
not current_app.config['HIDE_SET_INSTRUCTIONS'] and
|
||||
len(self.instructions)
|
||||
):
|
||||
return url_for(
|
||||
'set.details',
|
||||
id=self.fields.id,
|
||||
@@ -187,3 +334,22 @@ class BrickSet(RebrickableSet):
|
||||
)
|
||||
else:
|
||||
return ''
|
||||
|
||||
# Compute the url for the refresh button
|
||||
def url_for_refresh(self, /) -> str:
|
||||
return url_for('set.refresh', id=self.fields.id)
|
||||
|
||||
# Compute the url for the set storage
|
||||
def url_for_storage(self, /) -> str:
|
||||
if self.fields.storage is not None:
|
||||
return url_for('storage.details', id=self.fields.storage)
|
||||
else:
|
||||
return ''
|
||||
|
||||
# Update purchase date url
|
||||
def url_for_purchase_date(self, /) -> str:
|
||||
return url_for('set.update_purchase_date', id=self.fields.id)
|
||||
|
||||
# Update purchase price url
|
||||
def url_for_purchase_price(self, /) -> str:
|
||||
return url_for('set.update_purchase_price', id=self.fields.id)
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
from sqlite3 import Row
|
||||
from typing import Any, Self, Tuple
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from .exceptions import DatabaseException, ErrorException, NotFoundException
|
||||
from .record import BrickRecord
|
||||
from .sql import BrickSQL
|
||||
|
||||
|
||||
# Lego set checkbox
|
||||
class BrickSetCheckbox(BrickRecord):
|
||||
# Queries
|
||||
select_query: str = 'checkbox/select'
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
record: Row | dict[str, Any] | None = None,
|
||||
):
|
||||
super().__init__()
|
||||
|
||||
# Ingest the record if it has one
|
||||
if record is not None:
|
||||
self.ingest(record)
|
||||
|
||||
# SQL column name
|
||||
def as_column(self) -> str:
|
||||
return 'status_{id}'.format(id=self.fields.id)
|
||||
|
||||
# HTML dataset name
|
||||
def as_dataset(self) -> str:
|
||||
return '{id}'.format(
|
||||
id=self.as_column().replace('_', '-')
|
||||
)
|
||||
|
||||
# Delete from database
|
||||
def delete(self) -> None:
|
||||
BrickSQL().executescript(
|
||||
'checkbox/delete',
|
||||
id=self.fields.id,
|
||||
)
|
||||
|
||||
# Grab data from a form
|
||||
def from_form(self, form: dict[str, str]) -> Self:
|
||||
name = form.get('name', None)
|
||||
grid = form.get('grid', None)
|
||||
|
||||
if name is None or name == '':
|
||||
raise ErrorException('Checkbox name cannot be empty')
|
||||
|
||||
# Security: eh.
|
||||
# Prevent self-ownage with accidental quote escape
|
||||
self.fields.name = name
|
||||
self.fields.safe_name = self.fields.name.replace("'", "''")
|
||||
self.fields.displayed_on_grid = grid == 'on'
|
||||
|
||||
return self
|
||||
|
||||
# Insert into database
|
||||
def insert(self, **_) -> Tuple[int, str]:
|
||||
# Generate an ID for the checkbox (with underscores to make it
|
||||
# column name friendly)
|
||||
self.fields.id = str(uuid4()).replace('-', '_')
|
||||
|
||||
BrickSQL().executescript(
|
||||
'checkbox/add',
|
||||
id=self.fields.id,
|
||||
name=self.fields.safe_name,
|
||||
displayed_on_grid=self.fields.displayed_on_grid
|
||||
)
|
||||
|
||||
# To accomodate the parent().insert we have overriden
|
||||
return 0, ''
|
||||
|
||||
# Rename the checkbox
|
||||
def rename(self, /) -> None:
|
||||
# Update the name
|
||||
rows, _ = BrickSQL().execute_and_commit(
|
||||
'checkbox/update/name',
|
||||
parameters=self.sql_parameters(),
|
||||
)
|
||||
|
||||
if rows != 1:
|
||||
raise DatabaseException('Could not update the name for checkbox {name} ({id})'.format( # noqa: E501
|
||||
name=self.fields.name,
|
||||
id=self.fields.id,
|
||||
))
|
||||
|
||||
# URL to change the status
|
||||
def status_url(self, id: str) -> str:
|
||||
return url_for(
|
||||
'set.update_status',
|
||||
id=id,
|
||||
checkbox_id=self.fields.id
|
||||
)
|
||||
|
||||
# Select a specific checkbox (with an id)
|
||||
def select_specific(self, id: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.id = id
|
||||
|
||||
# Load from database
|
||||
if not self.select():
|
||||
raise NotFoundException(
|
||||
'Checkbox with ID {id} was not found in the database'.format(
|
||||
id=self.fields.id,
|
||||
),
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
# Update a status
|
||||
def update_status(
|
||||
self,
|
||||
name: str,
|
||||
status: bool,
|
||||
/
|
||||
) -> None:
|
||||
if not hasattr(self.fields, name) or name in ['id', 'name']:
|
||||
raise NotFoundException('{name} is not a field of a checkbox'.format( # noqa: E501
|
||||
name=name
|
||||
))
|
||||
|
||||
parameters = self.sql_parameters()
|
||||
parameters['status'] = status
|
||||
|
||||
# Update the status
|
||||
rows, _ = BrickSQL().execute_and_commit(
|
||||
'checkbox/update/status',
|
||||
parameters=parameters,
|
||||
name=name,
|
||||
)
|
||||
|
||||
if rows != 1:
|
||||
raise DatabaseException('Could not update the status "{status}" for checkbox {name} ({id})'.format( # noqa: E501
|
||||
status=name,
|
||||
name=self.fields.name,
|
||||
id=self.fields.id,
|
||||
))
|
||||
@@ -1,74 +0,0 @@
|
||||
import logging
|
||||
|
||||
from .exceptions import NotFoundException
|
||||
from .fields import BrickRecordFields
|
||||
from .record_list import BrickRecordList
|
||||
from .set_checkbox import BrickSetCheckbox
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Lego sets checkbox list
|
||||
class BrickSetCheckboxList(BrickRecordList[BrickSetCheckbox]):
|
||||
checkboxes: dict[str, BrickSetCheckbox]
|
||||
|
||||
# Queries
|
||||
select_query = 'checkbox/list'
|
||||
|
||||
def __init__(self, /, *, force: bool = False):
|
||||
# Load checkboxes only if there is none already loaded
|
||||
records = getattr(self, 'records', None)
|
||||
|
||||
if records is None or force:
|
||||
# Don't use super()__init__ as it would mask class variables
|
||||
self.fields = BrickRecordFields()
|
||||
|
||||
logger.info('Loading set checkboxes list')
|
||||
|
||||
BrickSetCheckboxList.records = []
|
||||
BrickSetCheckboxList.checkboxes = {}
|
||||
|
||||
# Load the checkboxes from the database
|
||||
for record in self.select():
|
||||
checkbox = BrickSetCheckbox(record=record)
|
||||
|
||||
BrickSetCheckboxList.records.append(checkbox)
|
||||
BrickSetCheckboxList.checkboxes[checkbox.fields.id] = checkbox
|
||||
|
||||
# Return the checkboxes as columns for a select
|
||||
def as_columns(
|
||||
self,
|
||||
/,
|
||||
*,
|
||||
solo: bool = False,
|
||||
table: str = 'bricktracker_set_statuses'
|
||||
) -> str:
|
||||
return ', '.join([
|
||||
'"{table}"."{column}"'.format(
|
||||
table=table,
|
||||
column=record.as_column(),
|
||||
)
|
||||
for record
|
||||
in self.records
|
||||
if solo or record.fields.displayed_on_grid
|
||||
])
|
||||
|
||||
# Grab a specific checkbox
|
||||
def get(self, id: str, /) -> BrickSetCheckbox:
|
||||
if id not in self.checkboxes:
|
||||
raise NotFoundException(
|
||||
'Checkbox with ID {id} was not found in the database'.format(
|
||||
id=self.fields.id,
|
||||
),
|
||||
)
|
||||
|
||||
return self.checkboxes[id]
|
||||
|
||||
# Get the list of checkboxes depending on the context
|
||||
def list(self, /, *, all: bool = False) -> list[BrickSetCheckbox]:
|
||||
return [
|
||||
record
|
||||
for record
|
||||
in self.records
|
||||
if all or record.fields.displayed_on_grid
|
||||
]
|
||||
+632
-92
@@ -1,18 +1,33 @@
|
||||
from typing import Self
|
||||
from typing import Any, Self, Union
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .record_list import BrickRecordList
|
||||
from .set_checkbox_list import BrickSetCheckboxList
|
||||
from .set_owner import BrickSetOwner
|
||||
from .set_owner_list import BrickSetOwnerList
|
||||
from .set_purchase_location import BrickSetPurchaseLocation
|
||||
from .set_purchase_location_list import BrickSetPurchaseLocationList
|
||||
from .set_status_list import BrickSetStatusList
|
||||
from .set_storage import BrickSetStorage
|
||||
from .set_storage_list import BrickSetStorageList
|
||||
from .set_tag import BrickSetTag
|
||||
from .set_tag_list import BrickSetTagList
|
||||
from .set import BrickSet
|
||||
from .theme_list import BrickThemeList
|
||||
from .instructions_list import BrickInstructionsList
|
||||
|
||||
|
||||
# All the sets from the database
|
||||
class BrickSetList(BrickRecordList[BrickSet]):
|
||||
themes: list[str]
|
||||
years: list[int]
|
||||
order: str
|
||||
|
||||
# Queries
|
||||
all_query: str = 'set/list/all'
|
||||
consolidated_query: str = 'set/list/consolidated'
|
||||
damaged_minifigure_query: str = 'set/list/damaged_minifigure'
|
||||
damaged_part_query: str = 'set/list/damaged_part'
|
||||
generic_query: str = 'set/list/generic'
|
||||
light_query: str = 'set/list/light'
|
||||
missing_minifigure_query: str = 'set/list/missing_minifigure'
|
||||
@@ -20,45 +35,536 @@ class BrickSetList(BrickRecordList[BrickSet]):
|
||||
select_query: str = 'set/list/all'
|
||||
using_minifigure_query: str = 'set/list/using_minifigure'
|
||||
using_part_query: str = 'set/list/using_part'
|
||||
using_storage_query: str = 'set/list/using_storage'
|
||||
without_storage_query: str = 'set/list/without_storage'
|
||||
|
||||
def __init__(self, /):
|
||||
super().__init__()
|
||||
|
||||
# Placeholders
|
||||
self.themes = []
|
||||
self.years = []
|
||||
|
||||
# Store the order for this list
|
||||
self.order = current_app.config['SETS_DEFAULT_ORDER']
|
||||
|
||||
# All the sets
|
||||
def all(self, /) -> Self:
|
||||
themes = set()
|
||||
|
||||
# Load the sets from the database
|
||||
for record in self.select(
|
||||
order=self.order,
|
||||
statuses=BrickSetCheckboxList().as_columns()
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
|
||||
self.records.append(brickset)
|
||||
themes.add(brickset.theme.name)
|
||||
|
||||
# Convert the set into a list and sort it
|
||||
self.themes = list(themes)
|
||||
self.themes.sort()
|
||||
# Load the sets from the database with metadata context for filtering
|
||||
filter_context = {
|
||||
'owners': BrickSetOwnerList.as_columns(),
|
||||
'statuses': BrickSetStatusList.as_columns(),
|
||||
'tags': BrickSetTagList.as_columns(),
|
||||
}
|
||||
self.list(do_theme=True, **filter_context)
|
||||
|
||||
return self
|
||||
|
||||
# A generic list of the different sets
|
||||
def generic(self, /) -> Self:
|
||||
for record in self.select(
|
||||
override_query=self.generic_query,
|
||||
order=self.order
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
# All sets in consolidated/grouped view
|
||||
def all_consolidated(self, /) -> Self:
|
||||
# Load the sets from the database using consolidated query with metadata context
|
||||
filter_context = {
|
||||
'owners_dict': BrickSetOwnerList.as_column_mapping(),
|
||||
'statuses_dict': BrickSetStatusList.as_column_mapping(),
|
||||
'tags_dict': BrickSetTagList.as_column_mapping(),
|
||||
}
|
||||
self.list(override_query=self.consolidated_query, do_theme=True, **filter_context)
|
||||
|
||||
self.records.append(brickset)
|
||||
return self
|
||||
|
||||
# All sets with pagination and filtering
|
||||
def all_filtered_paginated(
|
||||
self,
|
||||
search_query: str | None = None,
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
sort_field: str | None = None,
|
||||
sort_order: str = 'asc',
|
||||
status_filter: str | None = None,
|
||||
theme_filter: str | None = None,
|
||||
owner_filter: str | None = None,
|
||||
purchase_location_filter: str | None = None,
|
||||
storage_filter: str | None = None,
|
||||
tag_filter: str | None = None,
|
||||
year_filter: str | None = None,
|
||||
duplicate_filter: bool = False,
|
||||
use_consolidated: bool = True
|
||||
) -> tuple[Self, int]:
|
||||
# Convert theme name to theme ID for filtering
|
||||
theme_id_filter = None
|
||||
if theme_filter:
|
||||
theme_id_filter = self._theme_name_to_id(theme_filter)
|
||||
|
||||
# Check if any filters are applied
|
||||
has_filters = any([status_filter, theme_id_filter, owner_filter, purchase_location_filter, storage_filter, tag_filter, year_filter, duplicate_filter])
|
||||
|
||||
# Prepare filter context
|
||||
filter_context = {
|
||||
'search_query': search_query,
|
||||
'status_filter': status_filter,
|
||||
'theme_filter': theme_id_filter, # Use converted theme ID
|
||||
'owner_filter': owner_filter,
|
||||
'purchase_location_filter': purchase_location_filter,
|
||||
'storage_filter': storage_filter,
|
||||
'tag_filter': tag_filter,
|
||||
'year_filter': year_filter,
|
||||
'duplicate_filter': duplicate_filter,
|
||||
'owners': BrickSetOwnerList.as_columns(),
|
||||
'statuses': BrickSetStatusList.as_columns(),
|
||||
'tags': BrickSetTagList.as_columns(),
|
||||
'owners_dict': BrickSetOwnerList.as_column_mapping(),
|
||||
'statuses_dict': BrickSetStatusList.as_column_mapping(),
|
||||
'tags_dict': BrickSetTagList.as_column_mapping(),
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Field mapping for sorting
|
||||
if use_consolidated:
|
||||
field_mapping = {
|
||||
'set': '"rebrickable_sets"."number", "rebrickable_sets"."version"',
|
||||
'name': '"rebrickable_sets"."name"',
|
||||
'year': '"rebrickable_sets"."year"',
|
||||
'parts': '"rebrickable_sets"."number_of_parts"',
|
||||
'theme': '"rebrickable_sets"."theme_id"',
|
||||
'minifigures': '"total_minifigures"',
|
||||
'missing': '"total_missing"',
|
||||
'damaged': '"total_damaged"',
|
||||
'instances': '"instance_count"', # New field for consolidated view
|
||||
'purchase-date': '"purchase_date"', # Use the MIN aggregated value
|
||||
'purchase-price': '"purchase_price"' # Use the MIN aggregated value
|
||||
}
|
||||
else:
|
||||
field_mapping = {
|
||||
'set': '"rebrickable_sets"."number", "rebrickable_sets"."version"',
|
||||
'name': '"rebrickable_sets"."name"',
|
||||
'year': '"rebrickable_sets"."year"',
|
||||
'parts': '"rebrickable_sets"."number_of_parts"',
|
||||
'theme': '"rebrickable_sets"."theme_id"',
|
||||
'minifigures': '"total_minifigures"', # Use the alias from the SQL query
|
||||
'missing': '"total_missing"', # Use the alias from the SQL query
|
||||
'damaged': '"total_damaged"', # Use the alias from the SQL query
|
||||
'purchase-date': '"bricktracker_sets"."purchase_date"',
|
||||
'purchase-price': '"bricktracker_sets"."purchase_price"'
|
||||
}
|
||||
|
||||
# Choose query based on consolidation preference and filter complexity
|
||||
# Owner/tag filters still need to fall back to non-consolidated for now
|
||||
# due to complex aggregation requirements
|
||||
complex_filters = [owner_filter, tag_filter]
|
||||
if use_consolidated and not any(complex_filters):
|
||||
query_to_use = self.consolidated_query
|
||||
else:
|
||||
# Use filtered query when consolidation is disabled or complex filters applied
|
||||
query_to_use = 'set/list/all_filtered'
|
||||
|
||||
# Handle instructions filtering
|
||||
if status_filter in ['has-missing-instructions', '-has-missing-instructions']:
|
||||
# For instructions filter, we need to load all sets first, then filter and paginate
|
||||
return self._all_filtered_paginated_with_instructions(
|
||||
search_query, page, per_page, sort_field, sort_order,
|
||||
status_filter, theme_id_filter, owner_filter,
|
||||
purchase_location_filter, storage_filter, tag_filter
|
||||
)
|
||||
|
||||
# Handle special case for set sorting with multiple columns
|
||||
if sort_field == 'set' and field_mapping:
|
||||
# Create custom order clause for set sorting
|
||||
direction = 'DESC' if sort_order.lower() == 'desc' else 'ASC'
|
||||
custom_order = f'"rebrickable_sets"."number" {direction}, "rebrickable_sets"."version" {direction}'
|
||||
filter_context['order'] = custom_order
|
||||
# Remove set from field mapping to avoid double-processing
|
||||
field_mapping_copy = field_mapping.copy()
|
||||
field_mapping_copy.pop('set', None)
|
||||
field_mapping = field_mapping_copy
|
||||
sort_field = None # Disable automatic ORDER BY construction
|
||||
|
||||
# Normal SQL-based filtering and pagination
|
||||
result, total_count = self.paginate(
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
sort_field=sort_field,
|
||||
sort_order=sort_order,
|
||||
list_query=query_to_use,
|
||||
field_mapping=field_mapping,
|
||||
**filter_context
|
||||
)
|
||||
|
||||
# Populate themes and years for filter dropdown from filtered dataset (not just current page)
|
||||
# For themes dropdown, exclude theme_filter to show ALL available themes
|
||||
themes_context = filter_context.copy()
|
||||
themes_context.pop('theme_filter', None)
|
||||
result._populate_themes_from_filtered_dataset(
|
||||
query_to_use,
|
||||
**themes_context
|
||||
)
|
||||
# For years dropdown, exclude ALL filters to show ALL available years
|
||||
years_context = {
|
||||
'search_query': filter_context.get('search_query'),
|
||||
}
|
||||
result._populate_years_from_filtered_dataset(
|
||||
query_to_use,
|
||||
**years_context
|
||||
)
|
||||
|
||||
return result, total_count
|
||||
|
||||
def _populate_themes(self) -> None:
|
||||
"""Populate themes list from the current records"""
|
||||
themes = set()
|
||||
for record in self.records:
|
||||
if hasattr(record, 'theme') and hasattr(record.theme, 'name'):
|
||||
themes.add(record.theme.name)
|
||||
|
||||
self.themes = list(themes)
|
||||
self.themes.sort()
|
||||
|
||||
def _populate_years(self) -> None:
|
||||
"""Populate years list from the current records"""
|
||||
years = set()
|
||||
for record in self.records:
|
||||
if hasattr(record, 'fields') and hasattr(record.fields, 'year') and record.fields.year:
|
||||
years.add(record.fields.year)
|
||||
|
||||
self.years = list(years)
|
||||
self.years.sort(reverse=True) # Most recent years first
|
||||
|
||||
def _theme_name_to_id(self, theme_name_or_id: str) -> str | None:
|
||||
"""Convert a theme name or ID to theme ID for filtering"""
|
||||
try:
|
||||
# Check if the input is already a numeric theme ID
|
||||
if theme_name_or_id.isdigit():
|
||||
# Input is already a theme ID, validate it exists
|
||||
theme_list = BrickThemeList()
|
||||
theme_id = int(theme_name_or_id)
|
||||
if theme_id in theme_list.themes:
|
||||
return str(theme_id)
|
||||
else:
|
||||
return None
|
||||
|
||||
# Input is a theme name, convert to ID
|
||||
from .sql import BrickSQL
|
||||
theme_list = BrickThemeList()
|
||||
|
||||
# Find all theme IDs that match the name
|
||||
matching_theme_ids = []
|
||||
for theme_id, theme in theme_list.themes.items():
|
||||
if theme.name.lower() == theme_name_or_id.lower():
|
||||
matching_theme_ids.append(str(theme_id))
|
||||
|
||||
if not matching_theme_ids:
|
||||
return None
|
||||
|
||||
# If only one match, return it
|
||||
if len(matching_theme_ids) == 1:
|
||||
return matching_theme_ids[0]
|
||||
|
||||
# Multiple matches - check which theme ID actually has sets in the user's collection
|
||||
sql = BrickSQL()
|
||||
for theme_id in matching_theme_ids:
|
||||
result = sql.fetchone(
|
||||
'set/check_theme_exists',
|
||||
theme_id=theme_id
|
||||
)
|
||||
count = result['count'] if result else 0
|
||||
if count > 0:
|
||||
return theme_id
|
||||
|
||||
# If none have sets, return the first match (fallback)
|
||||
return matching_theme_ids[0]
|
||||
|
||||
except Exception:
|
||||
# If themes can't be loaded, return None to disable theme filtering
|
||||
return None
|
||||
|
||||
def _theme_id_to_name(self, theme_id: str) -> str | None:
|
||||
"""Convert a theme ID to theme name (lowercase) for dropdown display"""
|
||||
try:
|
||||
if not theme_id or not theme_id.isdigit():
|
||||
return None
|
||||
|
||||
from .theme_list import BrickThemeList
|
||||
theme_list = BrickThemeList()
|
||||
theme_id_int = int(theme_id)
|
||||
|
||||
if theme_id_int in theme_list.themes:
|
||||
return theme_list.themes[theme_id_int].name.lower()
|
||||
|
||||
return None
|
||||
except Exception as e:
|
||||
# For debugging - log the exception
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning(f"Failed to convert theme ID {theme_id} to name: {e}")
|
||||
return None
|
||||
|
||||
def _all_filtered_paginated_with_instructions(
|
||||
self,
|
||||
search_query: str | None,
|
||||
page: int,
|
||||
per_page: int,
|
||||
sort_field: str | None,
|
||||
sort_order: str,
|
||||
status_filter: str,
|
||||
theme_id_filter: str | None,
|
||||
owner_filter: str | None,
|
||||
purchase_location_filter: str | None,
|
||||
storage_filter: str | None,
|
||||
tag_filter: str | None
|
||||
) -> tuple[Self, int]:
|
||||
"""Handle filtering when instructions filter is involved"""
|
||||
try:
|
||||
# Load all sets first (without pagination) with full metadata
|
||||
all_sets = BrickSetList()
|
||||
filter_context = {
|
||||
'owners': BrickSetOwnerList.as_columns(),
|
||||
'statuses': BrickSetStatusList.as_columns(),
|
||||
'tags': BrickSetTagList.as_columns(),
|
||||
}
|
||||
all_sets.list(do_theme=True, **filter_context)
|
||||
|
||||
# Load instructions list
|
||||
instructions_list = BrickInstructionsList()
|
||||
instruction_sets = set(instructions_list.sets.keys())
|
||||
|
||||
# Apply all filters manually
|
||||
filtered_records = []
|
||||
for record in all_sets.records:
|
||||
# Apply instructions filter
|
||||
set_id = record.fields.set
|
||||
has_instructions = set_id in instruction_sets
|
||||
|
||||
if status_filter == 'has-missing-instructions' and has_instructions:
|
||||
continue # Skip sets that have instructions
|
||||
elif status_filter == '-has-missing-instructions' and not has_instructions:
|
||||
continue # Skip sets that don't have instructions
|
||||
|
||||
# Apply other filters manually
|
||||
if search_query and not self._matches_search(record, search_query):
|
||||
continue
|
||||
if theme_id_filter and not self._matches_theme(record, theme_id_filter):
|
||||
continue
|
||||
if owner_filter and not self._matches_owner(record, owner_filter):
|
||||
continue
|
||||
if purchase_location_filter and not self._matches_purchase_location(record, purchase_location_filter):
|
||||
continue
|
||||
if storage_filter and not self._matches_storage(record, storage_filter):
|
||||
continue
|
||||
if tag_filter and not self._matches_tag(record, tag_filter):
|
||||
continue
|
||||
|
||||
filtered_records.append(record)
|
||||
|
||||
# Apply sorting
|
||||
if sort_field:
|
||||
filtered_records = self._sort_records(filtered_records, sort_field, sort_order)
|
||||
|
||||
# Calculate pagination
|
||||
total_count = len(filtered_records)
|
||||
start_index = (page - 1) * per_page
|
||||
end_index = start_index + per_page
|
||||
paginated_records = filtered_records[start_index:end_index]
|
||||
|
||||
# Create result
|
||||
result = BrickSetList()
|
||||
result.records = paginated_records
|
||||
|
||||
# Copy themes and years from the source that has all sets
|
||||
result.themes = all_sets.themes if hasattr(all_sets, 'themes') else []
|
||||
result.years = all_sets.years if hasattr(all_sets, 'years') else []
|
||||
|
||||
# If themes or years weren't populated, populate them from current records
|
||||
if not result.themes:
|
||||
result._populate_themes()
|
||||
if not result.years:
|
||||
result._populate_years()
|
||||
|
||||
return result, total_count
|
||||
|
||||
except Exception:
|
||||
# Fall back to normal pagination without instructions filter
|
||||
return self.all_filtered_paginated(
|
||||
search_query, page, per_page, sort_field, sort_order,
|
||||
None, theme_id_filter, owner_filter,
|
||||
purchase_location_filter, storage_filter, tag_filter
|
||||
)
|
||||
|
||||
def _populate_years_from_filtered_dataset(self, query_name: str, **filter_context) -> None:
|
||||
"""Populate years list from all available records in filtered dataset"""
|
||||
try:
|
||||
# Use a simplified query to get just distinct years
|
||||
years_context = dict(filter_context)
|
||||
years_context.pop('limit', None)
|
||||
years_context.pop('offset', None)
|
||||
|
||||
# Use a special lightweight query for years
|
||||
year_records = super().select(
|
||||
override_query='set/list/years_only',
|
||||
**years_context
|
||||
)
|
||||
|
||||
# Extract years from records
|
||||
years = set()
|
||||
for record in year_records:
|
||||
year = record['year'] if 'year' in record.keys() else None
|
||||
if year:
|
||||
years.add(year)
|
||||
|
||||
if years:
|
||||
self.years = list(years)
|
||||
self.years.sort(reverse=True) # Most recent years first
|
||||
else:
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning("No years found in filtered dataset, falling back to current page")
|
||||
self._populate_years()
|
||||
except Exception as e:
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.error(f"Exception in _populate_years_from_filtered_dataset: {e}")
|
||||
self._populate_years()
|
||||
|
||||
def _populate_themes_from_filtered_dataset(self, query_name: str, **filter_context) -> None:
|
||||
"""Populate themes list from filtered dataset (all pages, not just current page)"""
|
||||
try:
|
||||
from .theme_list import BrickThemeList
|
||||
|
||||
# Use a simplified query to get just distinct theme_ids
|
||||
theme_context = dict(filter_context)
|
||||
theme_context.pop('limit', None)
|
||||
theme_context.pop('offset', None)
|
||||
|
||||
# Use a special lightweight query for themes
|
||||
theme_records = super().select(
|
||||
override_query='set/list/themes_only',
|
||||
**theme_context
|
||||
)
|
||||
|
||||
# Convert to theme names
|
||||
theme_list = BrickThemeList()
|
||||
themes = set()
|
||||
for record in theme_records:
|
||||
theme_id = record.get('theme_id')
|
||||
if theme_id:
|
||||
theme = theme_list.get(theme_id)
|
||||
if theme and hasattr(theme, 'name'):
|
||||
themes.add(theme.name)
|
||||
|
||||
self.themes = list(themes)
|
||||
self.themes.sort()
|
||||
|
||||
except Exception:
|
||||
# Fall back to simpler approach: get themes from ALL sets (ignoring filters)
|
||||
# This is better than showing only current page themes
|
||||
try:
|
||||
from .theme_list import BrickThemeList
|
||||
all_sets = BrickSetList()
|
||||
all_sets.list(do_theme=True)
|
||||
|
||||
themes = set()
|
||||
years = set()
|
||||
for record in all_sets.records:
|
||||
if hasattr(record, 'theme') and hasattr(record.theme, 'name'):
|
||||
themes.add(record.theme.name)
|
||||
if hasattr(record, 'fields') and hasattr(record.fields, 'year') and record.fields.year:
|
||||
years.add(record.fields.year)
|
||||
|
||||
self.themes = list(themes)
|
||||
self.themes.sort()
|
||||
self.years = list(years)
|
||||
self.years.sort(reverse=True)
|
||||
except Exception:
|
||||
# Final fallback to current page themes
|
||||
self._populate_themes()
|
||||
self._populate_years()
|
||||
|
||||
def _matches_search(self, record, search_query: str) -> bool:
|
||||
"""Check if record matches search query"""
|
||||
search_lower = search_query.lower()
|
||||
return (search_lower in record.fields.name.lower() or
|
||||
search_lower in record.fields.set.lower())
|
||||
|
||||
def _matches_theme(self, record, theme_id: str) -> bool:
|
||||
"""Check if record matches theme filter"""
|
||||
return str(record.fields.theme_id) == theme_id
|
||||
|
||||
def _matches_owner(self, record, owner_filter: str) -> bool:
|
||||
"""Check if record matches owner filter"""
|
||||
if not owner_filter.startswith('owner-'):
|
||||
return True
|
||||
|
||||
# Convert owner-uuid format to owner_uuid column name
|
||||
owner_column = owner_filter.replace('-', '_')
|
||||
|
||||
# Check if record has this owner attribute set to 1
|
||||
return hasattr(record.fields, owner_column) and getattr(record.fields, owner_column) == 1
|
||||
|
||||
def _matches_purchase_location(self, record, location_filter: str) -> bool:
|
||||
"""Check if record matches purchase location filter"""
|
||||
return record.fields.purchase_location == location_filter
|
||||
|
||||
def _matches_storage(self, record, storage_filter: str) -> bool:
|
||||
"""Check if record matches storage filter"""
|
||||
return record.fields.storage == storage_filter
|
||||
|
||||
def _matches_tag(self, record, tag_filter: str) -> bool:
|
||||
"""Check if record matches tag filter"""
|
||||
if not tag_filter.startswith('tag-'):
|
||||
return True
|
||||
|
||||
# Convert tag-uuid format to tag_uuid column name
|
||||
tag_column = tag_filter.replace('-', '_')
|
||||
|
||||
# Check if record has this tag attribute set to 1
|
||||
return hasattr(record.fields, tag_column) and getattr(record.fields, tag_column) == 1
|
||||
|
||||
def _sort_records(self, records, sort_field: str, sort_order: str):
|
||||
"""Sort records manually"""
|
||||
reverse = sort_order == 'desc'
|
||||
|
||||
if sort_field == 'set':
|
||||
return sorted(records, key=lambda r: self._set_sort_key(r.fields.set), reverse=reverse)
|
||||
elif sort_field == 'name':
|
||||
return sorted(records, key=lambda r: r.fields.name, reverse=reverse)
|
||||
elif sort_field == 'year':
|
||||
return sorted(records, key=lambda r: r.fields.year, reverse=reverse)
|
||||
elif sort_field == 'parts':
|
||||
return sorted(records, key=lambda r: r.fields.number_of_parts, reverse=reverse)
|
||||
# Add more sort fields as needed
|
||||
|
||||
return records
|
||||
|
||||
|
||||
def _set_sort_key(self, set_number: str) -> tuple:
|
||||
"""Generate sort key for set numbers like '10121-1' -> (10121, 1)"""
|
||||
try:
|
||||
if '-' in set_number:
|
||||
main_part, version_part = set_number.split('-', 1)
|
||||
return (int(main_part), int(version_part))
|
||||
else:
|
||||
return (int(set_number), 0)
|
||||
except (ValueError, TypeError):
|
||||
# Fallback to string sorting if parsing fails
|
||||
return (float('inf'), set_number)
|
||||
|
||||
# Sets with a minifigure part damaged
|
||||
def damaged_minifigure(self, figure: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.figure = figure
|
||||
|
||||
# Load the sets from the database
|
||||
self.list(override_query=self.damaged_minifigure_query)
|
||||
|
||||
return self
|
||||
|
||||
# Sets with a part damaged
|
||||
def damaged_part(self, part: str, color: int, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
# Load the sets from the database
|
||||
self.list(override_query=self.damaged_part_query)
|
||||
|
||||
return self
|
||||
|
||||
@@ -70,103 +576,137 @@ class BrickSetList(BrickRecordList[BrickSet]):
|
||||
else:
|
||||
order = '"bricktracker_sets"."rowid" DESC'
|
||||
|
||||
for record in self.select(
|
||||
order=order,
|
||||
limit=limit,
|
||||
statuses=BrickSetCheckboxList().as_columns()
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
|
||||
self.records.append(brickset)
|
||||
self.list(order=order, limit=limit)
|
||||
|
||||
return self
|
||||
|
||||
# Sets missing a minifigure
|
||||
def missing_minifigure(
|
||||
# Base set list
|
||||
def list(
|
||||
self,
|
||||
fig_num: str,
|
||||
/
|
||||
) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.fig_num = fig_num
|
||||
/,
|
||||
*,
|
||||
override_query: str | None = None,
|
||||
order: str | None = None,
|
||||
limit: int | None = None,
|
||||
do_theme: bool = False,
|
||||
**context: Any,
|
||||
) -> None:
|
||||
themes = set()
|
||||
years = set()
|
||||
|
||||
if order is None:
|
||||
order = self.order
|
||||
|
||||
# Load the sets from the database
|
||||
for record in self.select(
|
||||
override_query=self.missing_minifigure_query,
|
||||
order=self.order
|
||||
for record in super().select(
|
||||
override_query=override_query,
|
||||
order=order,
|
||||
limit=limit,
|
||||
**context
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
|
||||
self.records.append(brickset)
|
||||
if do_theme:
|
||||
themes.add(brickset.theme.name)
|
||||
if hasattr(brickset, 'fields') and hasattr(brickset.fields, 'year') and brickset.fields.year:
|
||||
years.add(brickset.fields.year)
|
||||
|
||||
# Convert the set into a list and sort it
|
||||
if do_theme:
|
||||
self.themes = list(themes)
|
||||
self.themes.sort()
|
||||
self.years = list(years)
|
||||
self.years.sort(reverse=True) # Most recent years first
|
||||
|
||||
# Sets missing a minifigure part
|
||||
def missing_minifigure(self, figure: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.figure = figure
|
||||
|
||||
# Load the sets from the database
|
||||
self.list(override_query=self.missing_minifigure_query)
|
||||
|
||||
return self
|
||||
|
||||
# Sets missing a part
|
||||
def missing_part(
|
||||
self,
|
||||
part_num: str,
|
||||
color_id: int,
|
||||
/,
|
||||
*,
|
||||
element_id: int | None = None,
|
||||
) -> Self:
|
||||
def missing_part(self, part: str, color: int, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part_num = part_num
|
||||
self.fields.color_id = color_id
|
||||
self.fields.element_id = element_id
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
# Load the sets from the database
|
||||
for record in self.select(
|
||||
override_query=self.missing_part_query,
|
||||
order=self.order
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
|
||||
self.records.append(brickset)
|
||||
self.list(override_query=self.missing_part_query)
|
||||
|
||||
return self
|
||||
|
||||
# Sets using a minifigure
|
||||
def using_minifigure(
|
||||
self,
|
||||
fig_num: str,
|
||||
/
|
||||
) -> Self:
|
||||
def using_minifigure(self, figure: str, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.fig_num = fig_num
|
||||
self.fields.figure = figure
|
||||
|
||||
# Load the sets from the database
|
||||
for record in self.select(
|
||||
override_query=self.using_minifigure_query,
|
||||
order=self.order
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
|
||||
self.records.append(brickset)
|
||||
self.list(override_query=self.using_minifigure_query)
|
||||
|
||||
return self
|
||||
|
||||
# Sets using a part
|
||||
def using_part(
|
||||
self,
|
||||
part_num: str,
|
||||
color_id: int,
|
||||
/,
|
||||
*,
|
||||
element_id: int | None = None,
|
||||
) -> Self:
|
||||
def using_part(self, part: str, color: int, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.part_num = part_num
|
||||
self.fields.color_id = color_id
|
||||
self.fields.element_id = element_id
|
||||
self.fields.part = part
|
||||
self.fields.color = color
|
||||
|
||||
# Load the sets from the database
|
||||
for record in self.select(
|
||||
override_query=self.using_part_query,
|
||||
order=self.order
|
||||
):
|
||||
brickset = BrickSet(record=record)
|
||||
|
||||
self.records.append(brickset)
|
||||
self.list(override_query=self.using_part_query)
|
||||
|
||||
return self
|
||||
|
||||
# Sets using a storage
|
||||
def using_storage(self, storage: BrickSetStorage, /) -> Self:
|
||||
# Save the parameters to the fields
|
||||
self.fields.storage = storage.fields.id
|
||||
|
||||
# Load the sets from the database
|
||||
self.list(override_query=self.using_storage_query)
|
||||
|
||||
return self
|
||||
|
||||
def without_storage(self, /) -> Self:
|
||||
# Load sets with no storage
|
||||
self.list(override_query=self.without_storage_query)
|
||||
|
||||
return self
|
||||
|
||||
|
||||
# Helper to build the metadata lists
|
||||
def set_metadata_lists(
|
||||
as_class: bool = False,
|
||||
hardcoded_statuses_only: bool = False
|
||||
) -> dict[
|
||||
str,
|
||||
Union[
|
||||
list[BrickSetOwner],
|
||||
list[BrickSetPurchaseLocation],
|
||||
BrickSetPurchaseLocation,
|
||||
list[BrickSetStorage],
|
||||
BrickSetStorageList,
|
||||
list[BrickSetTag]
|
||||
]
|
||||
]:
|
||||
# Get all statuses
|
||||
all_statuses = BrickSetStatusList.list(all=True)
|
||||
|
||||
# Filter to only hardcoded statuses if requested (for individual minifigures)
|
||||
if hardcoded_statuses_only:
|
||||
hardcoded_status_ids = ['minifigures_collected', 'set_checked', 'set_collected']
|
||||
statuses = [s for s in all_statuses if s.fields.id in hardcoded_status_ids]
|
||||
else:
|
||||
statuses = all_statuses
|
||||
|
||||
return {
|
||||
'brickset_owners': BrickSetOwnerList.list(),
|
||||
'brickset_purchase_locations': BrickSetPurchaseLocationList.list(as_class=as_class), # noqa: E501
|
||||
'brickset_statuses': statuses,
|
||||
'brickset_storages': BrickSetStorageList.list(as_class=as_class),
|
||||
'brickset_tags': BrickSetTagList.list(),
|
||||
}
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
from .metadata import BrickMetadata
|
||||
|
||||
|
||||
# Lego set owner metadata
|
||||
class BrickSetOwner(BrickMetadata):
|
||||
kind: str = 'owner'
|
||||
|
||||
# Endpoints
|
||||
set_state_endpoint: str = 'set.update_owner'
|
||||
individual_minifigure_state_endpoint: str = 'individual_minifigure.update_owner'
|
||||
|
||||
# Queries
|
||||
delete_query: str = 'set/metadata/owner/delete'
|
||||
insert_query: str = 'set/metadata/owner/insert'
|
||||
select_query: str = 'set/metadata/owner/select'
|
||||
update_field_query: str = 'set/metadata/owner/update/field'
|
||||
update_set_state_query: str = 'set/metadata/owner/update/state'
|
||||
update_individual_minifigure_state_query: str = 'individual_minifigure/metadata/owner/update/state'
|
||||
@@ -0,0 +1,24 @@
|
||||
from typing import Self
|
||||
|
||||
from .metadata_list import BrickMetadataList
|
||||
from .set_owner import BrickSetOwner
|
||||
|
||||
|
||||
# Lego sets owner list
|
||||
class BrickSetOwnerList(BrickMetadataList[BrickSetOwner]):
|
||||
kind: str = 'set owners'
|
||||
|
||||
# Database
|
||||
table: str = 'bricktracker_set_owners'
|
||||
order: str = '"bricktracker_metadata_owners"."name"'
|
||||
|
||||
# Queries
|
||||
select_query = 'set/metadata/owner/list'
|
||||
|
||||
# Endpoints
|
||||
set_state_endpoint: str = 'set.update_owner'
|
||||
|
||||
# Instantiate the list with the proper class
|
||||
@classmethod
|
||||
def new(cls, /, *, force: bool = False) -> Self:
|
||||
return cls(BrickSetOwner, force=force)
|
||||
@@ -0,0 +1,17 @@
|
||||
from .metadata import BrickMetadata
|
||||
|
||||
|
||||
# Lego set purchase location metadata
|
||||
class BrickSetPurchaseLocation(BrickMetadata):
|
||||
kind: str = 'purchase location'
|
||||
|
||||
# Endpoints
|
||||
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_purchase_location'
|
||||
|
||||
# Queries
|
||||
delete_query: str = 'set/metadata/purchase_location/delete'
|
||||
insert_query: str = 'set/metadata/purchase_location/insert'
|
||||
select_query: str = 'set/metadata/purchase_location/select'
|
||||
update_field_query: str = 'set/metadata/purchase_location/update/field'
|
||||
update_set_value_query: str = 'set/metadata/purchase_location/update/value'
|
||||
update_individual_minifigure_value_query: str = 'individual_minifigure/metadata/purchase_location/update/value'
|
||||
@@ -0,0 +1,45 @@
|
||||
from typing import Self
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .metadata_list import BrickMetadataList
|
||||
from .set_purchase_location import BrickSetPurchaseLocation
|
||||
|
||||
|
||||
# Lego sets purchase location list
|
||||
class BrickSetPurchaseLocationList(
|
||||
BrickMetadataList[BrickSetPurchaseLocation]
|
||||
):
|
||||
kind: str = 'set purchase locations'
|
||||
|
||||
# Order
|
||||
order: str = '"bricktracker_metadata_purchase_locations"."name"'
|
||||
|
||||
# Queries
|
||||
select_query: str = 'set/metadata/purchase_location/list'
|
||||
all_query: str = 'set/metadata/purchase_location/all'
|
||||
|
||||
# Set value endpoint
|
||||
set_value_endpoint: str = 'set.update_purchase_location'
|
||||
|
||||
# Individual minifigure value endpoint
|
||||
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_purchase_location'
|
||||
|
||||
# Load all purchase locations
|
||||
@classmethod
|
||||
def all(cls, /) -> Self:
|
||||
new = cls.new()
|
||||
new.override()
|
||||
|
||||
for record in new.select(
|
||||
override_query=cls.all_query,
|
||||
order=current_app.config['PURCHASE_LOCATION_DEFAULT_ORDER']
|
||||
):
|
||||
new.records.append(new.model(record=record))
|
||||
|
||||
return new
|
||||
|
||||
# Instantiate the list with the proper class
|
||||
@classmethod
|
||||
def new(cls, /, *, force: bool = False) -> Self:
|
||||
return cls(BrickSetPurchaseLocation, force=force)
|
||||
@@ -0,0 +1,36 @@
|
||||
from typing import Self
|
||||
|
||||
from .metadata import BrickMetadata
|
||||
|
||||
|
||||
# Lego set status metadata
|
||||
class BrickSetStatus(BrickMetadata):
|
||||
kind: str = 'status'
|
||||
|
||||
# Endpoints
|
||||
set_state_endpoint: str = 'set.update_status'
|
||||
individual_minifigure_state_endpoint: str = 'individual_minifigure.update_status'
|
||||
|
||||
# Queries
|
||||
delete_query: str = 'set/metadata/status/delete'
|
||||
insert_query: str = 'set/metadata/status/insert'
|
||||
select_query: str = 'set/metadata/status/select'
|
||||
update_field_query: str = 'set/metadata/status/update/field'
|
||||
update_set_state_query: str = 'set/metadata/status/update/state'
|
||||
update_individual_minifigure_state_query: str = 'individual_minifigure/metadata/status/update/state'
|
||||
|
||||
# Grab data from a form
|
||||
def from_form(self, form: dict[str, str], /) -> Self:
|
||||
super().from_form(form)
|
||||
|
||||
grid = form.get('grid', None)
|
||||
|
||||
self.fields.displayed_on_grid = grid == 'on'
|
||||
|
||||
return self
|
||||
|
||||
# Insert into database
|
||||
def insert(self, /, **_) -> None:
|
||||
super().insert(
|
||||
displayed_on_grid=self.fields.displayed_on_grid
|
||||
)
|
||||
@@ -0,0 +1,33 @@
|
||||
from typing import Self
|
||||
|
||||
from .metadata_list import BrickMetadataList
|
||||
from .set_status import BrickSetStatus
|
||||
|
||||
|
||||
# Lego sets status list
|
||||
class BrickSetStatusList(BrickMetadataList[BrickSetStatus]):
|
||||
kind: str = 'set statuses'
|
||||
|
||||
# Database
|
||||
table: str = 'bricktracker_set_statuses'
|
||||
order: str = '"bricktracker_metadata_statuses"."name"'
|
||||
|
||||
# Queries
|
||||
select_query = 'set/metadata/status/list'
|
||||
|
||||
# Endpoints
|
||||
set_state_endpoint: str = 'set.update_status'
|
||||
|
||||
# Filter the list of set status
|
||||
def filter(self, all: bool = False) -> list[BrickSetStatus]:
|
||||
return [
|
||||
record
|
||||
for record
|
||||
in self.records
|
||||
if all or record.fields.displayed_on_grid
|
||||
]
|
||||
|
||||
# Instantiate the list with the proper class
|
||||
@classmethod
|
||||
def new(cls, /, *, force: bool = False) -> Self:
|
||||
return cls(BrickSetStatus, force=force)
|
||||
@@ -0,0 +1,26 @@
|
||||
from .metadata import BrickMetadata
|
||||
|
||||
from flask import url_for
|
||||
|
||||
|
||||
# Lego set storage metadata
|
||||
class BrickSetStorage(BrickMetadata):
|
||||
kind: str = 'storage'
|
||||
|
||||
# Endpoints
|
||||
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_storage'
|
||||
|
||||
# Queries
|
||||
delete_query: str = 'set/metadata/storage/delete'
|
||||
insert_query: str = 'set/metadata/storage/insert'
|
||||
select_query: str = 'set/metadata/storage/select'
|
||||
update_field_query: str = 'set/metadata/storage/update/field'
|
||||
update_set_value_query: str = 'set/metadata/storage/update/value'
|
||||
update_individual_minifigure_value_query: str = 'individual_minifigure/metadata/storage/update/value'
|
||||
|
||||
# Self url
|
||||
def url(self, /) -> str:
|
||||
return url_for(
|
||||
'storage.details',
|
||||
id=self.fields.id,
|
||||
)
|
||||
@@ -0,0 +1,43 @@
|
||||
from typing import Self
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from .metadata_list import BrickMetadataList
|
||||
from .set_storage import BrickSetStorage
|
||||
|
||||
|
||||
# Lego sets storage list
|
||||
class BrickSetStorageList(BrickMetadataList[BrickSetStorage]):
|
||||
kind: str = 'set storages'
|
||||
|
||||
# Order
|
||||
order: str = '"bricktracker_metadata_storages"."name"'
|
||||
|
||||
# Queries
|
||||
select_query: str = 'set/metadata/storage/list'
|
||||
all_query: str = 'set/metadata/storage/all'
|
||||
|
||||
# Set value endpoint
|
||||
set_value_endpoint: str = 'set.update_storage'
|
||||
|
||||
# Individual minifigure value endpoint
|
||||
individual_minifigure_value_endpoint: str = 'individual_minifigure.update_storage'
|
||||
|
||||
# Load all storages
|
||||
@classmethod
|
||||
def all(cls, /) -> Self:
|
||||
new = cls.new()
|
||||
new.override()
|
||||
|
||||
for record in new.select(
|
||||
override_query=cls.all_query,
|
||||
order=current_app.config['STORAGE_DEFAULT_ORDER']
|
||||
):
|
||||
new.records.append(new.model(record=record))
|
||||
|
||||
return new
|
||||
|
||||
# Instantiate the list with the proper class
|
||||
@classmethod
|
||||
def new(cls, /, *, force: bool = False) -> Self:
|
||||
return cls(BrickSetStorage, force=force)
|
||||
@@ -0,0 +1,18 @@
|
||||
from .metadata import BrickMetadata
|
||||
|
||||
|
||||
# Lego set tag metadata
|
||||
class BrickSetTag(BrickMetadata):
|
||||
kind: str = 'tag'
|
||||
|
||||
# Endpoints
|
||||
set_state_endpoint: str = 'set.update_tag'
|
||||
individual_minifigure_state_endpoint: str = 'individual_minifigure.update_tag'
|
||||
|
||||
# Queries
|
||||
delete_query: str = 'set/metadata/tag/delete'
|
||||
insert_query: str = 'set/metadata/tag/insert'
|
||||
select_query: str = 'set/metadata/tag/select'
|
||||
update_field_query: str = 'set/metadata/tag/update/field'
|
||||
update_set_state_query: str = 'set/metadata/tag/update/state'
|
||||
update_individual_minifigure_state_query: str = 'individual_minifigure/metadata/tag/update/state'
|
||||
@@ -0,0 +1,24 @@
|
||||
from typing import Self
|
||||
|
||||
from .metadata_list import BrickMetadataList
|
||||
from .set_tag import BrickSetTag
|
||||
|
||||
|
||||
# Lego sets tag list
|
||||
class BrickSetTagList(BrickMetadataList[BrickSetTag]):
|
||||
kind: str = 'set tags'
|
||||
|
||||
# Database
|
||||
table: str = 'bricktracker_set_tags'
|
||||
order: str = '"bricktracker_metadata_tags"."name"'
|
||||
|
||||
# Queries
|
||||
select_query: str = 'set/metadata/tag/list'
|
||||
|
||||
# Endpoints
|
||||
set_state_endpoint: str = 'set.update_tag'
|
||||
|
||||
# Instantiate the list with the proper class
|
||||
@classmethod
|
||||
def new(cls, /, *, force: bool = False) -> Self:
|
||||
return cls(BrickSetTag, force=force)
|
||||
+117
-59
@@ -1,14 +1,15 @@
|
||||
import logging
|
||||
from typing import Any, Final, Tuple
|
||||
|
||||
from flask import copy_current_request_context, Flask, request
|
||||
from flask import Flask, request
|
||||
from flask_socketio import SocketIO
|
||||
|
||||
from .configuration_list import BrickConfigurationList
|
||||
from .instructions import BrickInstructions
|
||||
from .instructions_list import BrickInstructionsList
|
||||
from .login import LoginManager
|
||||
from .peeron_instructions import PeeronInstructions, PeeronPage
|
||||
from .peeron_pdf import PeeronPDF
|
||||
from .set import BrickSet
|
||||
from .socket_decorator import authenticated_socket, rebrickable_socket
|
||||
from .sql import close as sql_close
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -19,9 +20,14 @@ MESSAGES: Final[dict[str, str]] = {
|
||||
'CONNECT': 'connect',
|
||||
'DISCONNECT': 'disconnect',
|
||||
'DOWNLOAD_INSTRUCTIONS': 'download_instructions',
|
||||
'DOWNLOAD_PEERON_PAGES': 'download_peeron_pages',
|
||||
'FAIL': 'fail',
|
||||
'IMPORT_MINIFIGURE': 'import_minifigure',
|
||||
'IMPORT_SET': 'import_set',
|
||||
'LOAD_MINIFIGURE': 'load_minifigure',
|
||||
'LOAD_PEERON_PAGES': 'load_peeron_pages',
|
||||
'LOAD_SET': 'load_set',
|
||||
'MINIFIGURE_LOADED': 'minifigure_loaded',
|
||||
'PROGRESS': 'progress',
|
||||
'SET_LOADED': 'set_loaded',
|
||||
}
|
||||
@@ -71,7 +77,7 @@ class BrickSocket(object):
|
||||
*args,
|
||||
**kwargs,
|
||||
path=app.config['SOCKET_PATH'],
|
||||
async_mode='eventlet',
|
||||
async_mode='gevent',
|
||||
)
|
||||
|
||||
# Store the socket in the app config
|
||||
@@ -87,12 +93,8 @@ class BrickSocket(object):
|
||||
self.disconnected()
|
||||
|
||||
@self.socket.on(MESSAGES['DOWNLOAD_INSTRUCTIONS'], namespace=self.namespace) # noqa: E501
|
||||
@authenticated_socket(self)
|
||||
def download_instructions(data: dict[str, Any], /) -> None:
|
||||
# Needs to be authenticated
|
||||
if LoginManager.is_not_authenticated():
|
||||
self.fail(message='You need to be authenticated')
|
||||
return
|
||||
|
||||
instructions = BrickInstructions(
|
||||
'{name}.pdf'.format(name=data.get('alt', '')),
|
||||
socket=self
|
||||
@@ -107,71 +109,127 @@ class BrickSocket(object):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Start it in a thread if requested
|
||||
if self.threaded:
|
||||
@copy_current_request_context
|
||||
def do_download() -> None:
|
||||
instructions.download(path)
|
||||
instructions.download(path)
|
||||
|
||||
BrickInstructionsList(force=True)
|
||||
BrickInstructionsList(force=True)
|
||||
|
||||
self.socket.start_background_task(do_download)
|
||||
else:
|
||||
instructions.download(path)
|
||||
@self.socket.on(MESSAGES['LOAD_PEERON_PAGES'], namespace=self.namespace) # noqa: E501
|
||||
def load_peeron_pages(data: dict[str, Any], /) -> None:
|
||||
logger.debug('Socket: LOAD_PEERON_PAGES={data} (from: {fr})'.format(
|
||||
data=data, fr=request.remote_addr))
|
||||
|
||||
try:
|
||||
set_number = data.get('set', '')
|
||||
if not set_number:
|
||||
self.fail(message="Set number is required")
|
||||
return
|
||||
|
||||
# Create Peeron instructions instance with socket for progress reporting
|
||||
peeron = PeeronInstructions(set_number, socket=self)
|
||||
|
||||
# Find pages (this will report progress for thumbnail caching)
|
||||
pages = peeron.find_pages()
|
||||
|
||||
# Complete the operation (JavaScript will handle redirect)
|
||||
self.complete(message=f"Found {len(pages)} instruction pages on Peeron")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in load_peeron_pages: {e}")
|
||||
self.fail(message=f"Error loading Peeron pages: {e}")
|
||||
|
||||
@self.socket.on(MESSAGES['DOWNLOAD_PEERON_PAGES'], namespace=self.namespace) # noqa: E501
|
||||
@authenticated_socket(self)
|
||||
def download_peeron_pages(data: dict[str, Any], /) -> None:
|
||||
logger.debug('Socket: DOWNLOAD_PEERON_PAGES={data} (from: {fr})'.format(
|
||||
data=data,
|
||||
fr=request.sid, # type: ignore
|
||||
))
|
||||
|
||||
try:
|
||||
# Extract data from the request
|
||||
set_number = data.get('set', '')
|
||||
pages_data = data.get('pages', [])
|
||||
|
||||
if not set_number:
|
||||
raise ValueError("Set number is required")
|
||||
|
||||
if not pages_data:
|
||||
raise ValueError("No pages selected")
|
||||
|
||||
# Parse set number
|
||||
if '-' in set_number:
|
||||
parts = set_number.split('-', 1)
|
||||
set_num = parts[0]
|
||||
version_num = parts[1] if len(parts) > 1 else '1'
|
||||
else:
|
||||
set_num = set_number
|
||||
version_num = '1'
|
||||
|
||||
# Convert page data to PeeronPage objects
|
||||
pages = []
|
||||
for page_data in pages_data:
|
||||
page = PeeronPage(
|
||||
page_number=page_data.get('page_number', ''),
|
||||
original_image_url=page_data.get('original_image_url', ''),
|
||||
cached_full_image_path=page_data.get('cached_full_image_path', ''),
|
||||
cached_thumbnail_url='', # Not needed for PDF generation
|
||||
alt_text=page_data.get('alt_text', ''),
|
||||
rotation=page_data.get('rotation', 0)
|
||||
)
|
||||
pages.append(page)
|
||||
|
||||
# Create PDF generator and start download
|
||||
pdf_generator = PeeronPDF(set_num, version_num, pages, socket=self)
|
||||
pdf_generator.create_pdf()
|
||||
|
||||
# Note: Cache cleanup is handled automatically by pdf_generator.create_pdf()
|
||||
|
||||
# Refresh instructions list to include new PDF
|
||||
BrickInstructionsList(force=True)
|
||||
|
||||
@self.socket.on(MESSAGES['IMPORT_SET'], namespace=self.namespace)
|
||||
def import_set(data: dict[str, Any], /) -> None:
|
||||
# Needs to be authenticated
|
||||
if LoginManager.is_not_authenticated():
|
||||
self.fail(message='You need to be authenticated')
|
||||
return
|
||||
|
||||
# Needs the Rebrickable API key
|
||||
try:
|
||||
BrickConfigurationList.error_unless_is_set('REBRICKABLE_API_KEY') # noqa: E501
|
||||
except Exception as e:
|
||||
self.fail(message=str(e))
|
||||
return
|
||||
logger.error(f"Error in download_peeron_pages: {e}")
|
||||
self.fail(message=f"Error downloading Peeron pages: {e}")
|
||||
|
||||
brickset = BrickSet(socket=self)
|
||||
@self.socket.on(MESSAGES['IMPORT_SET'], namespace=self.namespace)
|
||||
@rebrickable_socket(self)
|
||||
def import_set(data: dict[str, Any], /) -> None:
|
||||
logger.debug('Socket: IMPORT_SET={data} (from: {fr})'.format(
|
||||
data=data,
|
||||
fr=request.sid, # type: ignore
|
||||
))
|
||||
|
||||
# Start it in a thread if requested
|
||||
if self.threaded:
|
||||
@copy_current_request_context
|
||||
def do_download() -> None:
|
||||
brickset.download(data)
|
||||
|
||||
self.socket.start_background_task(do_download)
|
||||
else:
|
||||
brickset.download(data)
|
||||
BrickSet().download(self, data)
|
||||
|
||||
@self.socket.on(MESSAGES['LOAD_SET'], namespace=self.namespace)
|
||||
def load_set(data: dict[str, Any], /) -> None:
|
||||
# Needs to be authenticated
|
||||
if LoginManager.is_not_authenticated():
|
||||
self.fail(message='You need to be authenticated')
|
||||
return
|
||||
logger.debug('Socket: LOAD_SET={data} (from: {fr})'.format(
|
||||
data=data,
|
||||
fr=request.sid, # type: ignore
|
||||
))
|
||||
|
||||
# Needs the Rebrickable API key
|
||||
try:
|
||||
BrickConfigurationList.error_unless_is_set('REBRICKABLE_API_KEY') # noqa: E501
|
||||
except Exception as e:
|
||||
self.fail(message=str(e))
|
||||
return
|
||||
BrickSet().load(self, data)
|
||||
|
||||
brickset = BrickSet(socket=self)
|
||||
@self.socket.on(MESSAGES['IMPORT_MINIFIGURE'], namespace=self.namespace)
|
||||
@rebrickable_socket(self)
|
||||
def import_minifigure(data: dict[str, Any], /) -> None:
|
||||
logger.debug('Socket: IMPORT_MINIFIGURE={data} (from: {fr})'.format(
|
||||
data=data,
|
||||
fr=request.sid, # type: ignore
|
||||
))
|
||||
|
||||
# Start it in a thread if requested
|
||||
if self.threaded:
|
||||
@copy_current_request_context
|
||||
def do_load() -> None:
|
||||
brickset.load(data)
|
||||
from .individual_minifigure import IndividualMinifigure
|
||||
IndividualMinifigure().download(self, data)
|
||||
|
||||
self.socket.start_background_task(do_load)
|
||||
else:
|
||||
brickset.load(data)
|
||||
@self.socket.on(MESSAGES['LOAD_MINIFIGURE'], namespace=self.namespace)
|
||||
def load_minifigure(data: dict[str, Any], /) -> None:
|
||||
logger.debug('Socket: LOAD_MINIFIGURE={data} (from: {fr})'.format(
|
||||
data=data,
|
||||
fr=request.sid, # type: ignore
|
||||
))
|
||||
|
||||
from .individual_minifigure import IndividualMinifigure
|
||||
IndividualMinifigure().load(self, data)
|
||||
|
||||
# Update the progress auto-incrementing
|
||||
def auto_progress(
|
||||
|
||||
@@ -0,0 +1,93 @@
|
||||
from functools import wraps
|
||||
from threading import Thread
|
||||
from typing import Callable, ParamSpec, TYPE_CHECKING, Union
|
||||
|
||||
from flask import copy_current_request_context
|
||||
|
||||
from .configuration_list import BrickConfigurationList
|
||||
from .login import LoginManager
|
||||
if TYPE_CHECKING:
|
||||
from .socket import BrickSocket
|
||||
|
||||
# What a threaded function can return (None or Thread)
|
||||
SocketReturn = Union[None, Thread]
|
||||
|
||||
# Threaded signature (*arg, **kwargs -> (None or Thread)
|
||||
P = ParamSpec('P')
|
||||
SocketCallable = Callable[P, SocketReturn]
|
||||
|
||||
|
||||
# Fail if not authenticated
|
||||
def authenticated_socket(
|
||||
self: 'BrickSocket',
|
||||
/,
|
||||
*,
|
||||
threaded: bool = True,
|
||||
) -> Callable[[SocketCallable], SocketCallable]:
|
||||
def outer(function: SocketCallable, /) -> SocketCallable:
|
||||
@wraps(function)
|
||||
def wrapper(*args, **kwargs) -> SocketReturn:
|
||||
# Needs to be authenticated
|
||||
if LoginManager.is_not_authenticated():
|
||||
self.fail(message='You need to be authenticated')
|
||||
return
|
||||
|
||||
# Apply threading
|
||||
if threaded:
|
||||
return threaded_socket(self)(function)(*args, **kwargs)
|
||||
else:
|
||||
return function(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
return outer
|
||||
|
||||
|
||||
# Fail if not ready for Rebrickable (authenticated, API key)
|
||||
# Automatically makes it threaded
|
||||
def rebrickable_socket(
|
||||
self: 'BrickSocket',
|
||||
/,
|
||||
*,
|
||||
threaded: bool = True,
|
||||
) -> Callable[[SocketCallable], SocketCallable]:
|
||||
def outer(function: SocketCallable, /) -> SocketCallable:
|
||||
@wraps(function)
|
||||
# Automatically authenticated
|
||||
@authenticated_socket(self, threaded=False)
|
||||
def wrapper(*args, **kwargs) -> SocketReturn:
|
||||
# Needs the Rebrickable API key
|
||||
try:
|
||||
BrickConfigurationList.error_unless_is_set('REBRICKABLE_API_KEY') # noqa: E501
|
||||
except Exception as e:
|
||||
self.fail(message=str(e))
|
||||
return
|
||||
|
||||
# Apply threading
|
||||
if threaded:
|
||||
return threaded_socket(self)(function)(*args, **kwargs)
|
||||
else:
|
||||
return function(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
return outer
|
||||
|
||||
|
||||
# Start the function in a thread if the socket is threaded
|
||||
def threaded_socket(
|
||||
self: 'BrickSocket',
|
||||
/
|
||||
) -> Callable[[SocketCallable], SocketCallable]:
|
||||
def outer(function: SocketCallable, /) -> SocketCallable:
|
||||
@wraps(function)
|
||||
def wrapper(*args, **kwargs) -> SocketReturn:
|
||||
# Start it in a thread if requested
|
||||
if self.threaded:
|
||||
@copy_current_request_context
|
||||
def do_function() -> None:
|
||||
function(*args, **kwargs)
|
||||
|
||||
return self.socket.start_background_task(do_function)
|
||||
else:
|
||||
return function(*args, **kwargs)
|
||||
return wrapper
|
||||
return outer
|
||||
+55
-1
@@ -1,3 +1,4 @@
|
||||
from importlib import import_module
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
@@ -59,6 +60,29 @@ class BrickSQL(object):
|
||||
# Grab a cursor
|
||||
self.cursor = self.connection.cursor()
|
||||
|
||||
# SQLite Performance Optimizations
|
||||
logger.debug('SQLite3: applying performance optimizations')
|
||||
|
||||
# Enable WAL (Write-Ahead Logging) mode for better concurrency
|
||||
# Allows multiple readers while writer is active
|
||||
self.connection.execute('PRAGMA journal_mode=WAL')
|
||||
|
||||
# Increase cache size for better query performance
|
||||
# Default is 2000 pages, increase to 10000 pages (~40MB for 4KB pages)
|
||||
self.connection.execute('PRAGMA cache_size=10000')
|
||||
|
||||
# Store temporary tables and indices in memory for speed
|
||||
self.connection.execute('PRAGMA temp_store=memory')
|
||||
|
||||
# Enable foreign key constraints (good practice)
|
||||
self.connection.execute('PRAGMA foreign_keys=ON')
|
||||
|
||||
# Optimize for read performance (trade write speed for read speed)
|
||||
self.connection.execute('PRAGMA synchronous=NORMAL')
|
||||
|
||||
# Analyze database statistics for better query planning
|
||||
self.connection.execute('ANALYZE')
|
||||
|
||||
# Grab the version and check
|
||||
try:
|
||||
version = self.fetchone('schema/get_version')
|
||||
@@ -301,7 +325,37 @@ class BrickSQL(object):
|
||||
version=pending.version)
|
||||
)
|
||||
|
||||
self.executescript(pending.get_query())
|
||||
# Load context from the migrations if it exists
|
||||
# It looks for a file in migrations/ named after the SQL file
|
||||
# and containing one function named migration_xxxx, also named
|
||||
# after the SQL file, returning a context dict.
|
||||
#
|
||||
# For instance:
|
||||
# - sql/migrations/0007.sql
|
||||
# - migrations/0007.py
|
||||
# - def migration_0007(BrickSQL) -> dict[str, Any]
|
||||
try:
|
||||
module = import_module(
|
||||
'.migrations.{name}'.format(
|
||||
name=pending.name
|
||||
),
|
||||
package='bricktracker'
|
||||
)
|
||||
except Exception:
|
||||
module = None
|
||||
|
||||
# If a module has been loaded, we need to fail if an error
|
||||
# occured while executing the migration function
|
||||
if module is not None:
|
||||
function = getattr(module, 'migration_{name}'.format(
|
||||
name=pending.name
|
||||
))
|
||||
|
||||
context: dict[str, Any] = function(self)
|
||||
else:
|
||||
context: dict[str, Any] = {}
|
||||
|
||||
self.executescript(pending.get_query(), **context)
|
||||
self.execute('schema/set_version', version=pending.version)
|
||||
|
||||
# Tells whether the database needs upgrade
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
SELECT
|
||||
"bricktracker_set_checkboxes"."id",
|
||||
"bricktracker_set_checkboxes"."name",
|
||||
"bricktracker_set_checkboxes"."displayed_on_grid"
|
||||
FROM "bricktracker_set_checkboxes"
|
||||
|
||||
{% block where %}{% endblock %}
|
||||
@@ -1,9 +0,0 @@
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
ALTER TABLE "bricktracker_set_statuses"
|
||||
DROP COLUMN "status_{{ id }}";
|
||||
|
||||
DELETE FROM "bricktracker_set_checkboxes"
|
||||
WHERE "bricktracker_set_checkboxes"."id" IS NOT DISTINCT FROM '{{ id }}';
|
||||
|
||||
COMMIT;
|
||||
@@ -1 +0,0 @@
|
||||
{% extends 'checkbox/base.sql' %}
|
||||
@@ -1,5 +0,0 @@
|
||||
{% extends 'checkbox/base.sql' %}
|
||||
|
||||
{% block where %}
|
||||
WHERE "bricktracker_set_checkboxes"."id" IS NOT DISTINCT FROM :id
|
||||
{% endblock %}
|
||||
@@ -1,3 +0,0 @@
|
||||
UPDATE "bricktracker_set_checkboxes"
|
||||
SET "name" = :safe_name
|
||||
WHERE "bricktracker_set_checkboxes"."id" IS NOT DISTINCT FROM :id
|
||||
@@ -1,3 +0,0 @@
|
||||
UPDATE "bricktracker_set_checkboxes"
|
||||
SET "{{name}}" = :status
|
||||
WHERE "bricktracker_set_checkboxes"."id" IS NOT DISTINCT FROM :id
|
||||
@@ -0,0 +1,19 @@
|
||||
-- Delete individual minifigure parts
|
||||
DELETE FROM "bricktracker_individual_minifigure_parts"
|
||||
WHERE "id" = :id;
|
||||
|
||||
-- Delete individual minifigure owners
|
||||
DELETE FROM "bricktracker_individual_minifigure_owners"
|
||||
WHERE "id" = :id;
|
||||
|
||||
-- Delete individual minifigure tags
|
||||
DELETE FROM "bricktracker_individual_minifigure_tags"
|
||||
WHERE "id" = :id;
|
||||
|
||||
-- Delete individual minifigure statuses
|
||||
DELETE FROM "bricktracker_individual_minifigure_statuses"
|
||||
WHERE "id" = :id;
|
||||
|
||||
-- Delete the individual minifigure itself
|
||||
DELETE FROM "bricktracker_individual_minifigures"
|
||||
WHERE "id" = :id;
|
||||
@@ -0,0 +1,15 @@
|
||||
INSERT OR IGNORE INTO "bricktracker_individual_minifigures" (
|
||||
"id",
|
||||
"figure",
|
||||
"quantity",
|
||||
"description",
|
||||
"storage",
|
||||
"purchase_location"
|
||||
) VALUES (
|
||||
:id,
|
||||
:figure,
|
||||
:quantity,
|
||||
:description,
|
||||
:storage,
|
||||
:purchase_location
|
||||
)
|
||||
@@ -0,0 +1,48 @@
|
||||
-- Get all individual minifigure instances for a specific storage location
|
||||
SELECT
|
||||
"bricktracker_individual_minifigures"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigures"."quantity",
|
||||
"bricktracker_individual_minifigures"."description",
|
||||
"bricktracker_individual_minifigures"."storage",
|
||||
"bricktracker_individual_minifigures"."purchase_location",
|
||||
"rebrickable_minifigures"."number",
|
||||
"rebrickable_minifigures"."name",
|
||||
"rebrickable_minifigures"."image",
|
||||
"rebrickable_minifigures"."number_of_parts",
|
||||
"storage_meta"."name" AS "storage_name",
|
||||
"purchase_meta"."name" AS "purchase_location_name",
|
||||
IFNULL("problem_join"."total_missing", 0) AS "total_missing",
|
||||
IFNULL("problem_join"."total_damaged", 0) AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigures"
|
||||
|
||||
INNER JOIN "rebrickable_minifigures"
|
||||
ON "bricktracker_individual_minifigures"."figure" = "rebrickable_minifigures"."figure"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_storages" AS "storage_meta"
|
||||
ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
|
||||
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
|
||||
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
|
||||
SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
GROUP BY "bricktracker_individual_minifigure_parts"."id"
|
||||
) "problem_join"
|
||||
ON "bricktracker_individual_minifigures"."id" = "problem_join"."id"
|
||||
|
||||
WHERE "bricktracker_individual_minifigures"."storage" IS NOT DISTINCT FROM :storage
|
||||
|
||||
{% if order %}
|
||||
ORDER BY {{ order }}
|
||||
{% else %}
|
||||
ORDER BY "bricktracker_individual_minifigures"."rowid" DESC
|
||||
{% endif %}
|
||||
|
||||
{% if limit %}
|
||||
LIMIT {{ limit }}
|
||||
{% endif %}
|
||||
@@ -0,0 +1,48 @@
|
||||
-- Get all individual minifigure instances without storage
|
||||
SELECT
|
||||
"bricktracker_individual_minifigures"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigures"."quantity",
|
||||
"bricktracker_individual_minifigures"."description",
|
||||
"bricktracker_individual_minifigures"."storage",
|
||||
"bricktracker_individual_minifigures"."purchase_location",
|
||||
"rebrickable_minifigures"."number",
|
||||
"rebrickable_minifigures"."name",
|
||||
"rebrickable_minifigures"."image",
|
||||
"rebrickable_minifigures"."number_of_parts",
|
||||
"storage_meta"."name" AS "storage_name",
|
||||
"purchase_meta"."name" AS "purchase_location_name",
|
||||
IFNULL("problem_join"."total_missing", 0) AS "total_missing",
|
||||
IFNULL("problem_join"."total_damaged", 0) AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigures"
|
||||
|
||||
INNER JOIN "rebrickable_minifigures"
|
||||
ON "bricktracker_individual_minifigures"."figure" = "rebrickable_minifigures"."figure"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_storages" AS "storage_meta"
|
||||
ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
|
||||
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
|
||||
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
|
||||
SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
GROUP BY "bricktracker_individual_minifigure_parts"."id"
|
||||
) "problem_join"
|
||||
ON "bricktracker_individual_minifigures"."id" = "problem_join"."id"
|
||||
|
||||
WHERE "bricktracker_individual_minifigures"."storage" IS NULL
|
||||
|
||||
{% if order %}
|
||||
ORDER BY {{ order }}
|
||||
{% else %}
|
||||
ORDER BY "bricktracker_individual_minifigures"."rowid" DESC
|
||||
{% endif %}
|
||||
|
||||
{% if limit %}
|
||||
LIMIT {{ limit }}
|
||||
{% endif %}
|
||||
@@ -0,0 +1,10 @@
|
||||
INSERT INTO "bricktracker_individual_minifigure_owners" (
|
||||
"id",
|
||||
"{{name}}"
|
||||
) VALUES (
|
||||
:id,
|
||||
:state
|
||||
)
|
||||
ON CONFLICT("id")
|
||||
DO UPDATE SET "{{name}}" = :state
|
||||
WHERE "bricktracker_individual_minifigure_owners"."id" IS NOT DISTINCT FROM :id
|
||||
@@ -0,0 +1,10 @@
|
||||
INSERT INTO "bricktracker_individual_minifigure_statuses" (
|
||||
"id",
|
||||
"{{name}}"
|
||||
) VALUES (
|
||||
:id,
|
||||
:state
|
||||
)
|
||||
ON CONFLICT("id")
|
||||
DO UPDATE SET "{{name}}" = :state
|
||||
WHERE "bricktracker_individual_minifigure_statuses"."id" IS NOT DISTINCT FROM :id
|
||||
@@ -0,0 +1,10 @@
|
||||
INSERT INTO "bricktracker_individual_minifigure_tags" (
|
||||
"id",
|
||||
"{{name}}"
|
||||
) VALUES (
|
||||
:id,
|
||||
:state
|
||||
)
|
||||
ON CONFLICT("id")
|
||||
DO UPDATE SET "{{name}}" = :state
|
||||
WHERE "bricktracker_individual_minifigure_tags"."id" IS NOT DISTINCT FROM :id
|
||||
@@ -0,0 +1,23 @@
|
||||
INSERT OR IGNORE INTO "bricktracker_individual_minifigure_parts" (
|
||||
"id",
|
||||
"part",
|
||||
"color",
|
||||
"spare",
|
||||
"quantity",
|
||||
"element",
|
||||
"rebrickable_inventory",
|
||||
"missing",
|
||||
"damaged",
|
||||
"checked"
|
||||
) VALUES (
|
||||
:id,
|
||||
:part,
|
||||
:color,
|
||||
:spare,
|
||||
:quantity,
|
||||
:element,
|
||||
:rebrickable_inventory,
|
||||
0,
|
||||
0,
|
||||
0
|
||||
)
|
||||
@@ -0,0 +1,38 @@
|
||||
-- Query parts for a specific individual minifigure instance
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigure_parts"."part",
|
||||
"bricktracker_individual_minifigure_parts"."color",
|
||||
"bricktracker_individual_minifigure_parts"."spare",
|
||||
"bricktracker_individual_minifigure_parts"."quantity",
|
||||
"bricktracker_individual_minifigure_parts"."element",
|
||||
"bricktracker_individual_minifigure_parts"."missing" AS "total_missing",
|
||||
"bricktracker_individual_minifigure_parts"."damaged" AS "total_damaged",
|
||||
"bricktracker_individual_minifigure_parts"."checked",
|
||||
"rebrickable_parts"."color_name",
|
||||
"rebrickable_parts"."color_rgb",
|
||||
"rebrickable_parts"."color_transparent",
|
||||
"rebrickable_parts"."bricklink_color_id",
|
||||
"rebrickable_parts"."bricklink_color_name",
|
||||
"rebrickable_parts"."bricklink_part_num",
|
||||
"rebrickable_parts"."name",
|
||||
"rebrickable_parts"."image",
|
||||
"rebrickable_parts"."image_id",
|
||||
"rebrickable_parts"."url",
|
||||
"rebrickable_parts"."print",
|
||||
NULL AS "total_quantity",
|
||||
NULL AS "total_spare",
|
||||
NULL AS "total_sets",
|
||||
NULL AS "total_minifigures"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures"
|
||||
ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
INNER JOIN "rebrickable_parts"
|
||||
ON "bricktracker_individual_minifigure_parts"."part" = "rebrickable_parts"."part"
|
||||
AND "bricktracker_individual_minifigure_parts"."color" = "rebrickable_parts"."color_id"
|
||||
WHERE "bricktracker_individual_minifigure_parts"."id" IS NOT DISTINCT FROM :id
|
||||
|
||||
{% if order %}
|
||||
ORDER BY {{ order | replace('"combined"', '"bricktracker_individual_minifigure_parts"') | replace('"bricktracker_parts"', '"bricktracker_individual_minifigure_parts"') }}
|
||||
{% endif %}
|
||||
@@ -0,0 +1,33 @@
|
||||
-- Select a specific part from an individual minifigure instance
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigure_parts"."part",
|
||||
"bricktracker_individual_minifigure_parts"."color",
|
||||
"bricktracker_individual_minifigure_parts"."spare",
|
||||
"bricktracker_individual_minifigure_parts"."quantity",
|
||||
"bricktracker_individual_minifigure_parts"."element",
|
||||
"bricktracker_individual_minifigure_parts"."missing",
|
||||
"bricktracker_individual_minifigure_parts"."damaged",
|
||||
"bricktracker_individual_minifigure_parts"."checked",
|
||||
"rebrickable_parts"."color_name",
|
||||
"rebrickable_parts"."color_rgb",
|
||||
"rebrickable_parts"."color_transparent",
|
||||
"rebrickable_parts"."bricklink_color_id",
|
||||
"rebrickable_parts"."bricklink_color_name",
|
||||
"rebrickable_parts"."bricklink_part_num",
|
||||
"rebrickable_parts"."name",
|
||||
"rebrickable_parts"."image",
|
||||
"rebrickable_parts"."image_id",
|
||||
"rebrickable_parts"."url",
|
||||
"rebrickable_parts"."print"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures"
|
||||
ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
INNER JOIN "rebrickable_parts"
|
||||
ON "bricktracker_individual_minifigure_parts"."part" = "rebrickable_parts"."part"
|
||||
AND "bricktracker_individual_minifigure_parts"."color" = "rebrickable_parts"."color_id"
|
||||
WHERE "bricktracker_individual_minifigure_parts"."id" IS NOT DISTINCT FROM :id
|
||||
AND "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_individual_minifigure_parts"."spare" IS NOT DISTINCT FROM :spare
|
||||
@@ -0,0 +1,6 @@
|
||||
UPDATE "bricktracker_individual_minifigure_parts"
|
||||
SET "checked" = :checked
|
||||
WHERE "bricktracker_individual_minifigure_parts"."id" IS NOT DISTINCT FROM :id
|
||||
AND "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_individual_minifigure_parts"."spare" IS NOT DISTINCT FROM :spare
|
||||
@@ -0,0 +1,6 @@
|
||||
UPDATE "bricktracker_individual_minifigure_parts"
|
||||
SET "damaged" = :damaged
|
||||
WHERE "bricktracker_individual_minifigure_parts"."id" IS NOT DISTINCT FROM :id
|
||||
AND "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_individual_minifigure_parts"."spare" IS NOT DISTINCT FROM :spare
|
||||
@@ -0,0 +1,6 @@
|
||||
UPDATE "bricktracker_individual_minifigure_parts"
|
||||
SET "missing" = :missing
|
||||
WHERE "bricktracker_individual_minifigure_parts"."id" IS NOT DISTINCT FROM :id
|
||||
AND "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_individual_minifigure_parts"."spare" IS NOT DISTINCT FROM :spare
|
||||
@@ -0,0 +1,35 @@
|
||||
-- Get a specific individual minifigure instance by ID
|
||||
SELECT
|
||||
"bricktracker_individual_minifigures"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigures"."quantity",
|
||||
"bricktracker_individual_minifigures"."description",
|
||||
"bricktracker_individual_minifigures"."storage",
|
||||
"bricktracker_individual_minifigures"."purchase_location",
|
||||
"rebrickable_minifigures"."number",
|
||||
"rebrickable_minifigures"."name",
|
||||
"rebrickable_minifigures"."image",
|
||||
"rebrickable_minifigures"."number_of_parts",
|
||||
"storage_meta"."name" AS "storage_name",
|
||||
"purchase_meta"."name" AS "purchase_location_name"{{ owners }}{{ statuses }}{{ tags }}
|
||||
FROM "bricktracker_individual_minifigures"
|
||||
|
||||
INNER JOIN "rebrickable_minifigures"
|
||||
ON "bricktracker_individual_minifigures"."figure" = "rebrickable_minifigures"."figure"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_storages" AS "storage_meta"
|
||||
ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
|
||||
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_individual_minifigure_owners"
|
||||
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_individual_minifigure_owners"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_individual_minifigure_statuses"
|
||||
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_individual_minifigure_statuses"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_individual_minifigure_tags"
|
||||
ON "bricktracker_individual_minifigures"."id" IS NOT DISTINCT FROM "bricktracker_individual_minifigure_tags"."id"
|
||||
|
||||
WHERE "bricktracker_individual_minifigures"."id" = :id
|
||||
@@ -0,0 +1,52 @@
|
||||
-- Get all individual minifigure instances for a specific figure
|
||||
SELECT
|
||||
"bricktracker_individual_minifigures"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigures"."quantity",
|
||||
"bricktracker_individual_minifigures"."description",
|
||||
"bricktracker_individual_minifigures"."storage",
|
||||
"bricktracker_individual_minifigures"."purchase_location",
|
||||
"rebrickable_minifigures"."number",
|
||||
"rebrickable_minifigures"."name",
|
||||
"rebrickable_minifigures"."image",
|
||||
"rebrickable_minifigures"."number_of_parts",
|
||||
"storage_meta"."name" AS "storage_name",
|
||||
"purchase_meta"."name" AS "purchase_location_name",
|
||||
{{ owners }},
|
||||
{{ statuses }},
|
||||
{{ tags }},
|
||||
IFNULL("problem_join"."total_missing", 0) AS "total_missing",
|
||||
IFNULL("problem_join"."total_damaged", 0) AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigures"
|
||||
|
||||
INNER JOIN "rebrickable_minifigures"
|
||||
ON "bricktracker_individual_minifigures"."figure" = "rebrickable_minifigures"."figure"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_storages" AS "storage_meta"
|
||||
ON "bricktracker_individual_minifigures"."storage" = "storage_meta"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_metadata_purchase_locations" AS "purchase_meta"
|
||||
ON "bricktracker_individual_minifigures"."purchase_location" = "purchase_meta"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_individual_minifigure_owners"
|
||||
ON "bricktracker_individual_minifigures"."id" = "bricktracker_individual_minifigure_owners"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_individual_minifigure_statuses"
|
||||
ON "bricktracker_individual_minifigures"."id" = "bricktracker_individual_minifigure_statuses"."id"
|
||||
|
||||
LEFT JOIN "bricktracker_individual_minifigure_tags"
|
||||
ON "bricktracker_individual_minifigures"."id" = "bricktracker_individual_minifigure_tags"."id"
|
||||
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
|
||||
SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
GROUP BY "bricktracker_individual_minifigure_parts"."id"
|
||||
) "problem_join"
|
||||
ON "bricktracker_individual_minifigures"."id" = "problem_join"."id"
|
||||
|
||||
WHERE "bricktracker_individual_minifigures"."figure" = :figure
|
||||
|
||||
ORDER BY "bricktracker_individual_minifigures"."rowid" DESC
|
||||
@@ -0,0 +1,7 @@
|
||||
UPDATE "bricktracker_individual_minifigures"
|
||||
SET
|
||||
"quantity" = :quantity,
|
||||
"description" = :description,
|
||||
"storage" = :storage,
|
||||
"purchase_location" = :purchase_location
|
||||
WHERE "id" = :id
|
||||
@@ -4,7 +4,7 @@ PRAGMA foreign_keys = ON;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Create a Bricktable set table: with their unique IDs, and a reference to the Rebrickable set
|
||||
-- Create a Bricktracker set table: with their unique IDs, and a reference to the Rebrickable set
|
||||
CREATE TABLE "bricktracker_sets" (
|
||||
"id" TEXT NOT NULL,
|
||||
"rebrickable_set" TEXT NOT NULL,
|
||||
|
||||
@@ -0,0 +1,74 @@
|
||||
-- description: Renaming various complicated field names to something simpler, and add a bunch of extra fields for later
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Rename sets table
|
||||
ALTER TABLE "bricktracker_sets" RENAME TO "bricktracker_sets_old";
|
||||
|
||||
-- Create a Bricktracker metadata storage table for later
|
||||
CREATE TABLE "bricktracker_metadata_storages" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
PRIMARY KEY("id")
|
||||
);
|
||||
|
||||
-- Create a Bricktracker metadata purchase location table for later
|
||||
CREATE TABLE "bricktracker_metadata_purchase_locations" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
PRIMARY KEY("id")
|
||||
);
|
||||
|
||||
-- Re-Create a Bricktracker set table with the simplified name
|
||||
CREATE TABLE "bricktracker_sets" (
|
||||
"id" TEXT NOT NULL,
|
||||
"set" TEXT NOT NULL,
|
||||
"description" TEXT,
|
||||
"storage" TEXT, -- Storage bin location
|
||||
"purchase_date" REAL, -- Purchase data
|
||||
"purchase_location" TEXT, -- Purchase location
|
||||
"purchase_price" REAL, -- Purchase price
|
||||
PRIMARY KEY("id"),
|
||||
FOREIGN KEY("set") REFERENCES "rebrickable_sets"("set"),
|
||||
FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
|
||||
FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
|
||||
);
|
||||
|
||||
-- Insert existing sets into the new table
|
||||
INSERT INTO "bricktracker_sets" (
|
||||
"id",
|
||||
"set"
|
||||
)
|
||||
SELECT
|
||||
"bricktracker_sets_old"."id",
|
||||
"bricktracker_sets_old"."rebrickable_set"
|
||||
FROM "bricktracker_sets_old";
|
||||
|
||||
-- Rename status table
|
||||
ALTER TABLE "bricktracker_set_statuses" RENAME TO "bricktracker_set_statuses_old";
|
||||
|
||||
-- Re-create a table for the status of each checkbox
|
||||
CREATE TABLE "bricktracker_set_statuses" (
|
||||
"id" TEXT NOT NULL,
|
||||
{% if structure %}{{ structure }},{% endif %}
|
||||
PRIMARY KEY("id"),
|
||||
FOREIGN KEY("id") REFERENCES "bricktracker_sets"("id")
|
||||
);
|
||||
|
||||
-- Insert existing status into the new table
|
||||
INSERT INTO "bricktracker_set_statuses" (
|
||||
{% if targets %}{{ targets }},{% endif %}
|
||||
"id"
|
||||
)
|
||||
SELECT
|
||||
{% if sources %}{{ sources }},{% endif %}
|
||||
"bricktracker_set_statuses_old"."bricktracker_set_id"
|
||||
FROM "bricktracker_set_statuses_old";
|
||||
|
||||
-- Delete the original tables
|
||||
DROP TABLE "bricktracker_set_statuses_old";
|
||||
DROP TABLE "bricktracker_sets_old";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,30 @@
|
||||
-- description: Creation of the deduplicated table of Rebrickable minifigures
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Create a Rebrickable minifigures table: each unique minifigure imported from Rebrickable
|
||||
CREATE TABLE "rebrickable_minifigures" (
|
||||
"figure" TEXT NOT NULL,
|
||||
"number" INTEGER NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"image" TEXT,
|
||||
PRIMARY KEY("figure")
|
||||
);
|
||||
|
||||
-- Insert existing sets into the new table
|
||||
INSERT INTO "rebrickable_minifigures" (
|
||||
"figure",
|
||||
"number",
|
||||
"name",
|
||||
"image"
|
||||
)
|
||||
SELECT
|
||||
"minifigures"."fig_num",
|
||||
CAST(SUBSTR("minifigures"."fig_num", 5) AS INTEGER),
|
||||
"minifigures"."name",
|
||||
"minifigures"."set_img_url"
|
||||
FROM "minifigures"
|
||||
GROUP BY
|
||||
"minifigures"."fig_num";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,32 @@
|
||||
-- description: Migrate the Bricktracker minifigures
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Create a Bricktracker minifigures table: an amount of minifigures linked to a Bricktracker set
|
||||
CREATE TABLE "bricktracker_minifigures" (
|
||||
"id" TEXT NOT NULL,
|
||||
"figure" TEXT NOT NULL,
|
||||
"quantity" INTEGER NOT NULL,
|
||||
PRIMARY KEY("id", "figure"),
|
||||
FOREIGN KEY("id") REFERENCES "bricktracker_sets"("id"),
|
||||
FOREIGN KEY("figure") REFERENCES "rebrickable_minifigures"("figure")
|
||||
);
|
||||
|
||||
-- Insert existing sets into the new table
|
||||
INSERT INTO "bricktracker_minifigures" (
|
||||
"id",
|
||||
"figure",
|
||||
"quantity"
|
||||
)
|
||||
SELECT
|
||||
"minifigures"."u_id",
|
||||
"minifigures"."fig_num",
|
||||
"minifigures"."quantity"
|
||||
FROM "minifigures";
|
||||
|
||||
-- Rename the original table (don't delete it yet?)
|
||||
ALTER TABLE "minifigures" RENAME TO "minifigures_old";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,42 @@
|
||||
-- description: Creation of the deduplicated table of Rebrickable parts, and add a bunch of extra fields for later
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Create a Rebrickable parts table: each unique part imported from Rebrickable
|
||||
CREATE TABLE "rebrickable_parts" (
|
||||
"part" TEXT NOT NULL,
|
||||
"color_id" INTEGER NOT NULL,
|
||||
"color_name" TEXT NOT NULL,
|
||||
"color_rgb" TEXT, -- can be NULL because it was not saved before
|
||||
"color_transparent" BOOLEAN, -- can be NULL because it was not saved before
|
||||
"name" TEXT NOT NULL,
|
||||
"category" INTEGER, -- can be NULL because it was not saved before
|
||||
"image" TEXT,
|
||||
"image_id" TEXT,
|
||||
"url" TEXT, -- can be NULL because it was not saved before
|
||||
"print" INTEGER, -- can be NULL, was not saved before
|
||||
PRIMARY KEY("part", "color_id")
|
||||
);
|
||||
|
||||
-- Insert existing parts into the new table
|
||||
INSERT INTO "rebrickable_parts" (
|
||||
"part",
|
||||
"color_id",
|
||||
"color_name",
|
||||
"name",
|
||||
"image",
|
||||
"image_id"
|
||||
)
|
||||
SELECT
|
||||
"inventory"."part_num",
|
||||
"inventory"."color_id",
|
||||
"inventory"."color_name",
|
||||
"inventory"."name",
|
||||
"inventory"."part_img_url",
|
||||
"inventory"."part_img_url_id"
|
||||
FROM "inventory"
|
||||
GROUP BY
|
||||
"inventory"."part_num",
|
||||
"inventory"."color_id";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,73 @@
|
||||
-- description: Migrate the Bricktracker parts (and missing parts), and add a bunch of extra fields for later
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Fix: somehow a deletion bug was introduced in an older release?
|
||||
DELETE FROM "inventory"
|
||||
WHERE "inventory"."u_id" NOT IN (
|
||||
SELECT "bricktracker_sets"."id"
|
||||
FROM "bricktracker_sets"
|
||||
);
|
||||
|
||||
DELETE FROM "missing"
|
||||
WHERE "missing"."u_id" NOT IN (
|
||||
SELECT "bricktracker_sets"."id"
|
||||
FROM "bricktracker_sets"
|
||||
);
|
||||
|
||||
-- Create a Bricktracker parts table: an amount of parts linked to a Bricktracker set
|
||||
CREATE TABLE "bricktracker_parts" (
|
||||
"id" TEXT NOT NULL,
|
||||
"figure" TEXT,
|
||||
"part" TEXT NOT NULL,
|
||||
"color" INTEGER NOT NULL,
|
||||
"spare" BOOLEAN NOT NULL,
|
||||
"quantity" INTEGER NOT NULL,
|
||||
"element" INTEGER,
|
||||
"rebrickable_inventory" INTEGER NOT NULL,
|
||||
"missing" INTEGER NOT NULL DEFAULT 0,
|
||||
"damaged" INTEGER NOT NULL DEFAULT 0,
|
||||
PRIMARY KEY("id", "figure", "part", "color", "spare"),
|
||||
FOREIGN KEY("id") REFERENCES "bricktracker_sets"("id"),
|
||||
FOREIGN KEY("figure") REFERENCES "rebrickable_minifigures"("figure"),
|
||||
FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id")
|
||||
);
|
||||
|
||||
-- Insert existing parts into the new table
|
||||
INSERT INTO "bricktracker_parts" (
|
||||
"id",
|
||||
"figure",
|
||||
"part",
|
||||
"color",
|
||||
"spare",
|
||||
"quantity",
|
||||
"element",
|
||||
"rebrickable_inventory",
|
||||
"missing"
|
||||
)
|
||||
SELECT
|
||||
"inventory"."u_id",
|
||||
CASE WHEN SUBSTR("inventory"."set_num", 0, 5) = 'fig-' THEN "inventory"."set_num" ELSE NULL END,
|
||||
"inventory"."part_num",
|
||||
"inventory"."color_id",
|
||||
"inventory"."is_spare",
|
||||
"inventory"."quantity",
|
||||
"inventory"."element_id",
|
||||
"inventory"."id",
|
||||
IFNULL("missing"."quantity", 0)
|
||||
FROM "inventory"
|
||||
LEFT JOIN "missing"
|
||||
ON "inventory"."set_num" IS NOT DISTINCT FROM "missing"."set_num"
|
||||
AND "inventory"."id" IS NOT DISTINCT FROM "missing"."id"
|
||||
AND "inventory"."part_num" IS NOT DISTINCT FROM "missing"."part_num"
|
||||
AND "inventory"."color_id" IS NOT DISTINCT FROM "missing"."color_id"
|
||||
AND "inventory"."element_id" IS NOT DISTINCT FROM "missing"."element_id"
|
||||
AND "inventory"."u_id" IS NOT DISTINCT FROM "missing"."u_id";
|
||||
|
||||
-- Rename the original table (don't delete it yet?)
|
||||
ALTER TABLE "inventory" RENAME TO "inventory_old";
|
||||
ALTER TABLE "missing" RENAME TO "missing_old";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,7 @@
|
||||
-- description: Rename checkboxes to status metadata
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
ALTER TABLE "bricktracker_set_checkboxes" RENAME TO "bricktracker_metadata_statuses";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,26 @@
|
||||
-- description: Add set owners
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Create a table to define each set owners: an id and a name
|
||||
CREATE TABLE "bricktracker_metadata_owners" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
PRIMARY KEY("id")
|
||||
);
|
||||
|
||||
-- Create a table for the set owners
|
||||
CREATE TABLE "bricktracker_set_owners" (
|
||||
"id" TEXT NOT NULL,
|
||||
PRIMARY KEY("id"),
|
||||
FOREIGN KEY("id") REFERENCES "bricktracker_sets"("id")
|
||||
);
|
||||
|
||||
-- Create a table for the wish owners
|
||||
CREATE TABLE "bricktracker_wish_owners" (
|
||||
"set" TEXT NOT NULL,
|
||||
PRIMARY KEY("set"),
|
||||
FOREIGN KEY("set") REFERENCES "bricktracker_wishes"("set")
|
||||
);
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,19 @@
|
||||
-- description: Add set tags
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Create a table to define each set tags: an id and a name
|
||||
CREATE TABLE "bricktracker_metadata_tags" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
PRIMARY KEY("id")
|
||||
);
|
||||
|
||||
-- Create a table for the set tags
|
||||
CREATE TABLE "bricktracker_set_tags" (
|
||||
"id" TEXT NOT NULL,
|
||||
PRIMARY KEY("id"),
|
||||
FOREIGN KEY("id") REFERENCES "bricktracker_sets"("id")
|
||||
);
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,32 @@
|
||||
-- description: Add number of parts for minifigures
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Add the number_of_parts column to the minifigures
|
||||
ALTER TABLE "rebrickable_minifigures"
|
||||
ADD COLUMN "number_of_parts" INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- Update the number of parts for each minifigure
|
||||
UPDATE "rebrickable_minifigures"
|
||||
SET "number_of_parts" = "parts_sum"."number_of_parts"
|
||||
FROM (
|
||||
SELECT
|
||||
"parts"."figure",
|
||||
SUM("parts"."quantity") as "number_of_parts"
|
||||
FROM (
|
||||
SELECT
|
||||
"bricktracker_parts"."figure",
|
||||
"bricktracker_parts"."quantity"
|
||||
FROM "bricktracker_parts"
|
||||
WHERE "bricktracker_parts"."figure" IS NOT NULL
|
||||
GROUP BY
|
||||
"bricktracker_parts"."figure",
|
||||
"bricktracker_parts"."part",
|
||||
"bricktracker_parts"."color",
|
||||
"bricktracker_parts"."spare"
|
||||
) "parts"
|
||||
GROUP BY "parts"."figure"
|
||||
) "parts_sum"
|
||||
WHERE "rebrickable_minifigures"."figure" = "parts_sum"."figure";
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,9 @@
|
||||
-- description: Add BrickLink color fields to rebrickable_parts table
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Add BrickLink color fields to the rebrickable_parts table
|
||||
ALTER TABLE "rebrickable_parts" ADD COLUMN "bricklink_color_id" INTEGER;
|
||||
ALTER TABLE "rebrickable_parts" ADD COLUMN "bricklink_color_name" TEXT;
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,8 @@
|
||||
-- description: Add BrickLink part number field to rebrickable_parts table
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Add BrickLink part number field to the rebrickable_parts table
|
||||
ALTER TABLE "rebrickable_parts" ADD COLUMN "bricklink_part_num" TEXT;
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,9 @@
|
||||
-- description: Add checked field to bricktracker_parts table for part walkthrough tracking
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Add checked field to the bricktracker_parts table
|
||||
-- This allows users to track which parts they have checked during walkthroughs
|
||||
ALTER TABLE "bricktracker_parts" ADD COLUMN "checked" BOOLEAN DEFAULT 0;
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,56 @@
|
||||
-- Migration 0019: Performance optimization indexes
|
||||
|
||||
-- High-impact composite index for problem parts aggregation
|
||||
-- Used in set listings, statistics, and problem reports
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_parts_id_missing_damaged
|
||||
ON bricktracker_parts(id, missing, damaged);
|
||||
|
||||
-- Composite index for parts lookup by part and color
|
||||
-- Used in part listings and filtering operations
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_parts_part_color_spare
|
||||
ON bricktracker_parts(part, color, spare);
|
||||
|
||||
-- Composite index for set storage filtering
|
||||
-- Used in set listings filtered by storage location
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_sets_set_storage
|
||||
ON bricktracker_sets("set", storage);
|
||||
|
||||
-- Search optimization index for set names
|
||||
-- Improves text search performance on set listings
|
||||
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_name_lower
|
||||
ON rebrickable_sets(LOWER(name));
|
||||
|
||||
-- Search optimization index for part names
|
||||
-- Improves text search performance on part listings
|
||||
CREATE INDEX IF NOT EXISTS idx_rebrickable_parts_name_lower
|
||||
ON rebrickable_parts(LOWER(name));
|
||||
|
||||
-- Additional indexes for common join patterns
|
||||
|
||||
-- Set purchase filtering
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_sets_purchase_location
|
||||
ON bricktracker_sets(purchase_location);
|
||||
|
||||
-- Parts quantity filtering
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_parts_quantity
|
||||
ON bricktracker_parts(quantity);
|
||||
|
||||
-- Year-based filtering optimization
|
||||
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_year
|
||||
ON rebrickable_sets(year);
|
||||
|
||||
-- Theme-based filtering optimization
|
||||
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_theme_id
|
||||
ON rebrickable_sets(theme_id);
|
||||
|
||||
-- Rebrickable sets number and version for sorting
|
||||
CREATE INDEX IF NOT EXISTS idx_rebrickable_sets_number_version
|
||||
ON rebrickable_sets(number, version);
|
||||
|
||||
-- Purchase date filtering and sorting
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_sets_purchase_date
|
||||
ON bricktracker_sets(purchase_date);
|
||||
|
||||
-- Minifigures aggregation optimization
|
||||
CREATE INDEX IF NOT EXISTS idx_bricktracker_minifigures_id_quantity
|
||||
ON bricktracker_minifigures(id, quantity);
|
||||
@@ -0,0 +1,132 @@
|
||||
-- Migration 0020: add individual minifigures and individual parts tables.
-- "Individual" items are owned on their own, not as part of a set.

-- Individual minifigures: minifigures tracked outside of any set.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigures" (
    "id" TEXT NOT NULL,                    -- application-generated identifier
    "figure" TEXT NOT NULL,                -- Rebrickable figure reference
    "quantity" INTEGER NOT NULL DEFAULT 1,
    "description" TEXT,
    "storage" TEXT,                        -- storage bin location
    "purchase_date" REAL,                  -- purchase date (REAL; presumably a timestamp — confirm with writer)
    "purchase_location" TEXT,              -- purchase location
    "purchase_price" REAL,                 -- purchase price
    PRIMARY KEY("id"),
    FOREIGN KEY("figure") REFERENCES "rebrickable_minifigures"("figure"),
    FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
    FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
);

-- Individual minifigure status flags (one row per individual minifigure).
-- NOTE(review): column names mirror the set-status columns
-- (status_set_checked / status_set_collected) even though these rows
-- describe individual minifigures — presumably kept for compatibility
-- with shared status-handling code; confirm.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_statuses" (
    "id" TEXT NOT NULL,
    "status_minifigures_collected" BOOLEAN NOT NULL DEFAULT 0,
    "status_set_checked" BOOLEAN NOT NULL DEFAULT 0,
    "status_set_collected" BOOLEAN NOT NULL DEFAULT 0,
    PRIMARY KEY("id"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id")
);

-- Individual minifigure owners.
-- Per-owner boolean columns are added later (see migration 0021).
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_owners" (
    "id" TEXT NOT NULL,
    PRIMARY KEY("id"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id")
);

-- Individual minifigure tags.
-- Per-tag boolean columns are added later (see migration 0021).
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_tags" (
    "id" TEXT NOT NULL,
    PRIMARY KEY("id"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id")
);

-- Constituent parts of individual minifigures.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_minifigure_parts" (
    "id" TEXT NOT NULL,                    -- owning individual minifigure id
    "part" TEXT NOT NULL,
    "color" INTEGER NOT NULL,
    "spare" BOOLEAN NOT NULL,
    "quantity" INTEGER NOT NULL,
    "element" INTEGER,
    "rebrickable_inventory" INTEGER NOT NULL,
    "missing" INTEGER NOT NULL DEFAULT 0,  -- count of missing pieces
    "damaged" INTEGER NOT NULL DEFAULT 0,  -- count of damaged pieces
    "checked" BOOLEAN DEFAULT 0,
    PRIMARY KEY("id", "part", "color", "spare"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_minifigures"("id"),
    FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id")
);

-- Individual parts: parts tracked outside of any set.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_parts" (
    "id" TEXT NOT NULL,                    -- application-generated identifier
    "part" TEXT NOT NULL,
    "color" INTEGER NOT NULL,
    "quantity" INTEGER NOT NULL DEFAULT 1,
    "description" TEXT,
    "storage" TEXT,                        -- storage bin location
    "purchase_date" REAL,                  -- purchase date
    "purchase_location" TEXT,              -- purchase location
    "purchase_price" REAL,                 -- purchase price
    PRIMARY KEY("id"),
    FOREIGN KEY("part", "color") REFERENCES "rebrickable_parts"("part", "color_id"),
    FOREIGN KEY("storage") REFERENCES "bricktracker_metadata_storages"("id"),
    FOREIGN KEY("purchase_location") REFERENCES "bricktracker_metadata_purchase_locations"("id")
);

-- Individual part owners (per-owner columns added in migration 0021).
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_owners" (
    "id" TEXT NOT NULL,
    PRIMARY KEY("id"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_parts"("id")
);

-- Individual part tags (per-tag columns added in migration 0021).
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_tags" (
    "id" TEXT NOT NULL,
    PRIMARY KEY("id"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_parts"("id")
);

-- Individual part status flags.
-- NOTE(review): columns are copied from the minifigure/set status schema
-- (status_minifigures_collected etc.) although these rows describe parts;
-- looks like copy-paste — confirm whether part-specific names were intended.
-- Not renamed here because application code may already reference them.
CREATE TABLE IF NOT EXISTS "bricktracker_individual_part_statuses" (
    "id" TEXT NOT NULL,
    "status_minifigures_collected" BOOLEAN NOT NULL DEFAULT 0,
    "status_set_checked" BOOLEAN NOT NULL DEFAULT 0,
    "status_set_collected" BOOLEAN NOT NULL DEFAULT 0,
    PRIMARY KEY("id"),
    FOREIGN KEY("id") REFERENCES "bricktracker_individual_parts"("id")
);

-- Indexes for individual minifigures.
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_figure
    ON bricktracker_individual_minifigures(figure);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_storage
    ON bricktracker_individual_minifigures(storage);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_purchase_location
    ON bricktracker_individual_minifigures(purchase_location);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigures_purchase_date
    ON bricktracker_individual_minifigures(purchase_date);

-- Indexes for individual minifigure parts.
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigure_parts_id_missing_damaged
    ON bricktracker_individual_minifigure_parts(id, missing, damaged);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_minifigure_parts_part_color
    ON bricktracker_individual_minifigure_parts(part, color);

-- Indexes for individual parts.
CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_part_color
    ON bricktracker_individual_parts(part, color);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_storage
    ON bricktracker_individual_parts(storage);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_location
    ON bricktracker_individual_parts(purchase_location);

CREATE INDEX IF NOT EXISTS idx_bricktracker_individual_parts_purchase_date
    ON bricktracker_individual_parts(purchase_date);
|
||||
@@ -0,0 +1,23 @@
|
||||
-- Migration 0021: replicate the existing owner/tag columns onto the new
-- individual-minifigure and individual-part metadata tables, so both item
-- kinds share the same owner/tag column layout.

-- Owner columns for individual minifigures.
ALTER TABLE "bricktracker_individual_minifigure_owners"
    ADD COLUMN "owner_32479d0a_cd3c_43c6_aa16_b3f378915b13" BOOLEAN NOT NULL DEFAULT 0;

ALTER TABLE "bricktracker_individual_minifigure_owners"
    ADD COLUMN "owner_2f07518d_40e1_4279_b0d0_aa339f195cbf" BOOLEAN NOT NULL DEFAULT 0;

-- Tag columns for individual minifigures.
ALTER TABLE "bricktracker_individual_minifigure_tags"
    ADD COLUMN "tag_b1b5c316_5caf_4b82_a085_ac4c7ab9b8db" BOOLEAN NOT NULL DEFAULT 0;

-- Owner columns for individual parts.
ALTER TABLE "bricktracker_individual_part_owners"
    ADD COLUMN "owner_32479d0a_cd3c_43c6_aa16_b3f378915b13" BOOLEAN NOT NULL DEFAULT 0;

ALTER TABLE "bricktracker_individual_part_owners"
    ADD COLUMN "owner_2f07518d_40e1_4279_b0d0_aa339f195cbf" BOOLEAN NOT NULL DEFAULT 0;

-- Tag columns for individual parts.
ALTER TABLE "bricktracker_individual_part_tags"
    ADD COLUMN "tag_b1b5c316_5caf_4b82_a085_ac4c7ab9b8db" BOOLEAN NOT NULL DEFAULT 0;
|
||||
@@ -0,0 +1,74 @@
|
||||
-- Combined listing of set-based and individual minifigures.
-- Both sources are wrapped in a UNION ALL subquery aliased "combined" so
-- that extending templates can join, filter and aggregate over a single
-- relation regardless of where a minifigure comes from.
SELECT
    "combined"."quantity",
    "combined"."figure",
    "combined"."number",
    "combined"."number_of_parts",
    "combined"."name",
    "combined"."image",
    -- Overridable aggregate columns. The NULL dummies keep the column list
    -- (and therefore ORDER BY targets) stable when a child template does
    -- not override a block.
    {% block total_missing %}
    NULL AS "total_missing", -- dummy for order: total_missing
    {% endblock %}
    {% block total_damaged %}
    NULL AS "total_damaged", -- dummy for order: total_damaged
    {% endblock %}
    {% block total_quantity %}
    NULL AS "total_quantity", -- dummy for order: total_quantity
    {% endblock %}
    {% block total_sets %}
    NULL AS "total_sets", -- dummy for order: total_sets
    {% endblock %}
    {% block total_individual %}
    NULL AS "total_individual" -- dummy for order: total_individual
    {% endblock %}
FROM (
    -- Set-based minifigures
    SELECT
        "bricktracker_minifigures"."id",
        "bricktracker_minifigures"."quantity",
        "rebrickable_minifigures"."figure",
        "rebrickable_minifigures"."number",
        "rebrickable_minifigures"."number_of_parts",
        "rebrickable_minifigures"."name",
        "rebrickable_minifigures"."image",
        "bricktracker_minifigures"."rowid" AS "rowid",
        'set' AS "source_type"
    FROM "bricktracker_minifigures"
    INNER JOIN "rebrickable_minifigures"
        ON "bricktracker_minifigures"."figure" IS NOT DISTINCT FROM "rebrickable_minifigures"."figure"

    UNION ALL

    -- Individual minifigures
    SELECT
        "bricktracker_individual_minifigures"."id",
        "bricktracker_individual_minifigures"."quantity",
        "rebrickable_minifigures"."figure",
        "rebrickable_minifigures"."number",
        "rebrickable_minifigures"."number_of_parts",
        "rebrickable_minifigures"."name",
        "rebrickable_minifigures"."image",
        "bricktracker_individual_minifigures"."rowid" AS "rowid",
        'individual' AS "source_type"
    FROM "bricktracker_individual_minifigures"
    INNER JOIN "rebrickable_minifigures"
        ON "bricktracker_individual_minifigures"."figure" IS NOT DISTINCT FROM "rebrickable_minifigures"."figure"
) AS "combined"

{% block join %}{% endblock %}

{% block where %}{% endblock %}

{% block group %}{% endblock %}

{# NOTE(review): order/limit/offset are interpolated directly into the SQL;
   they must always be application-controlled values, never raw user input. #}
{% if order %}
ORDER BY {{ order }}
{% endif %}

{% if limit %}
LIMIT {{ limit }}
{% endif %}

{% if offset %}
OFFSET {{ offset }}
{% endif %}
|
||||
@@ -1,31 +0,0 @@
|
||||
SELECT
|
||||
"minifigures"."fig_num",
|
||||
"minifigures"."set_num",
|
||||
"minifigures"."name",
|
||||
"minifigures"."quantity",
|
||||
"minifigures"."set_img_url",
|
||||
"minifigures"."u_id",
|
||||
{% block total_missing %}
|
||||
NULL AS "total_missing", -- dummy for order: total_missing
|
||||
{% endblock %}
|
||||
{% block total_quantity %}
|
||||
NULL AS "total_quantity", -- dummy for order: total_quantity
|
||||
{% endblock %}
|
||||
{% block total_sets %}
|
||||
NULL AS "total_sets" -- dummy for order: total_sets
|
||||
{% endblock %}
|
||||
FROM "minifigures"
|
||||
|
||||
{% block join %}{% endblock %}
|
||||
|
||||
{% block where %}{% endblock %}
|
||||
|
||||
{% block group %}{% endblock %}
|
||||
|
||||
{% if order %}
|
||||
ORDER BY {{ order }}
|
||||
{% endif %}
|
||||
|
||||
{% if limit %}
|
||||
LIMIT {{ limit }}
|
||||
{% endif %}
|
||||
@@ -1,15 +1,9 @@
|
||||
INSERT INTO "minifigures" (
|
||||
"fig_num",
|
||||
"set_num",
|
||||
"name",
|
||||
"quantity",
|
||||
"set_img_url",
|
||||
"u_id"
|
||||
INSERT INTO "bricktracker_minifigures" (
|
||||
"id",
|
||||
"figure",
|
||||
"quantity"
|
||||
) VALUES (
|
||||
:fig_num,
|
||||
:set_num,
|
||||
:name,
|
||||
:quantity,
|
||||
:set_img_url,
|
||||
:u_id
|
||||
:id,
|
||||
:figure,
|
||||
:quantity
|
||||
)
|
||||
|
||||
@@ -1,34 +1,67 @@
|
||||
{% extends 'minifigure/base/select.sql' %}
|
||||
{% extends 'minifigure/base/base.sql' %}
|
||||
|
||||
{% block total_missing %}
|
||||
SUM(IFNULL("missing_join"."total", 0)) AS "total_missing",
|
||||
SUM(IFNULL("problem_join"."total_missing", 0)) AS "total_missing",
|
||||
{% endblock %}
|
||||
|
||||
{% block total_damaged %}
|
||||
SUM(IFNULL("problem_join"."total_damaged", 0)) AS "total_damaged",
|
||||
{% endblock %}
|
||||
|
||||
{% block total_quantity %}
|
||||
SUM(IFNULL("minifigures"."quantity", 0)) AS "total_quantity",
|
||||
SUM(IFNULL("combined"."quantity", 0)) AS "total_quantity",
|
||||
{% endblock %}
|
||||
|
||||
{% block total_sets %}
|
||||
COUNT("minifigures"."set_num") AS "total_sets"
|
||||
SUM(CASE WHEN "combined"."source_type" = 'set' THEN 1 ELSE 0 END) AS "total_sets",
|
||||
{% endblock %}
|
||||
|
||||
{% block total_individual %}
|
||||
SUM(CASE WHEN "combined"."source_type" = 'individual' THEN 1 ELSE 0 END) AS "total_individual"
|
||||
{% endblock %}
|
||||
|
||||
{% block join %}
|
||||
-- LEFT JOIN + SELECT to avoid messing the total
|
||||
-- Combine parts from both set-based and individual minifigures
|
||||
LEFT JOIN (
|
||||
-- Set-based minifigure parts
|
||||
SELECT
|
||||
"missing"."set_num",
|
||||
"missing"."u_id",
|
||||
SUM("missing"."quantity") AS total
|
||||
FROM "missing"
|
||||
"bricktracker_parts"."id",
|
||||
"bricktracker_parts"."figure",
|
||||
SUM("bricktracker_parts"."missing") AS "total_missing",
|
||||
SUM("bricktracker_parts"."damaged") AS "total_damaged"
|
||||
FROM "bricktracker_parts"
|
||||
WHERE "bricktracker_parts"."figure" IS NOT NULL
|
||||
GROUP BY
|
||||
"missing"."set_num",
|
||||
"missing"."u_id"
|
||||
) missing_join
|
||||
ON "minifigures"."u_id" IS NOT DISTINCT FROM "missing_join"."u_id"
|
||||
AND "minifigures"."fig_num" IS NOT DISTINCT FROM "missing_join"."set_num"
|
||||
"bricktracker_parts"."id",
|
||||
"bricktracker_parts"."figure"
|
||||
|
||||
UNION ALL
|
||||
|
||||
-- Individual minifigure parts
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
"combined"."figure",
|
||||
SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
|
||||
SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures" ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
INNER JOIN "rebrickable_minifigures" AS "combined" ON "bricktracker_individual_minifigures"."figure" = "combined"."figure"
|
||||
GROUP BY
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
"combined"."figure"
|
||||
) "problem_join"
|
||||
ON "combined"."id" IS NOT DISTINCT FROM "problem_join"."id"
|
||||
AND "combined"."figure" IS NOT DISTINCT FROM "problem_join"."figure"
|
||||
{% endblock %}
|
||||
|
||||
{% block where %}
|
||||
{% if search_query %}
|
||||
WHERE (LOWER("combined"."name") LIKE LOWER('%{{ search_query }}%'))
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block group %}
|
||||
GROUP BY
|
||||
"minifigures"."fig_num"
|
||||
"combined"."figure"
|
||||
{% endblock %}
|
||||
|
||||
@@ -0,0 +1,122 @@
|
||||
{% extends 'minifigure/base/base.sql' %}

{# Minifigure listing with per-owner filtering and aggregation.
   owner_id is interpolated into column identifiers ("owner_<id>");
   identifiers cannot be bound as SQL parameters, so owner_id MUST be
   validated against the application's own owner column list (it is a
   schema-derived id) and never taken from raw user input. #}

{% block total_missing %}
    SUM(IFNULL("problem_join"."total_missing", 0)) AS "total_missing",
{% endblock %}

{% block total_damaged %}
    SUM(IFNULL("problem_join"."total_damaged", 0)) AS "total_damaged",
{% endblock %}

{% block total_quantity %}
{% if owner_id and owner_id != 'all' %}
    -- Only count quantities belonging to the selected owner, per source.
    SUM(CASE
        WHEN "combined"."source_type" = 'set' AND "set_owners"."owner_{{ owner_id }}" = 1 THEN IFNULL("combined"."quantity", 0)
        WHEN "combined"."source_type" = 'individual' AND "individual_owners"."owner_{{ owner_id }}" = 1 THEN IFNULL("combined"."quantity", 0)
        ELSE 0
    END) AS "total_quantity",
{% else %}
    SUM(IFNULL("combined"."quantity", 0)) AS "total_quantity",
{% endif %}
{% endblock %}

{% block total_sets %}
{% if owner_id and owner_id != 'all' %}
    SUM(CASE
        WHEN "combined"."source_type" = 'set' AND "set_owners"."owner_{{ owner_id }}" = 1 THEN 1
        ELSE 0
    END) AS "total_sets",
{% else %}
    SUM(CASE WHEN "combined"."source_type" = 'set' THEN 1 ELSE 0 END) AS "total_sets",
{% endif %}
{% endblock %}

{% block total_individual %}
{% if owner_id and owner_id != 'all' %}
    SUM(CASE
        WHEN "combined"."source_type" = 'individual' AND "individual_owners"."owner_{{ owner_id }}" = 1 THEN 1
        ELSE 0
    END) AS "total_individual"
{% else %}
    SUM(CASE WHEN "combined"."source_type" = 'individual' THEN 1 ELSE 0 END) AS "total_individual"
{% endif %}
{% endblock %}

{% block join %}
-- Join with set owners for set-based minifigures
LEFT JOIN "bricktracker_sets"
    ON "combined"."id" = "bricktracker_sets"."id" AND "combined"."source_type" = 'set'

LEFT JOIN "bricktracker_set_owners" AS "set_owners"
    ON "bricktracker_sets"."id" = "set_owners"."id"

-- Join with individual minifigure owners for individual minifigures
LEFT JOIN "bricktracker_individual_minifigure_owners" AS "individual_owners"
    ON "combined"."id" = "individual_owners"."id" AND "combined"."source_type" = 'individual'

-- Pre-aggregated problem counters; LEFT JOIN + subquery so the part rows
-- do not multiply the minifigure totals.
LEFT JOIN (
    -- Set-based minifigure parts
    SELECT
        "bricktracker_parts"."id",
        "bricktracker_parts"."figure",
        {% if owner_id and owner_id != 'all' %}
        SUM(CASE WHEN "owner_parts"."owner_{{ owner_id }}" = 1 THEN "bricktracker_parts"."missing" ELSE 0 END) AS "total_missing",
        SUM(CASE WHEN "owner_parts"."owner_{{ owner_id }}" = 1 THEN "bricktracker_parts"."damaged" ELSE 0 END) AS "total_damaged"
        {% else %}
        SUM("bricktracker_parts"."missing") AS "total_missing",
        SUM("bricktracker_parts"."damaged") AS "total_damaged"
        {% endif %}
    FROM "bricktracker_parts"
    INNER JOIN "bricktracker_sets" AS "parts_sets"
        ON "bricktracker_parts"."id" = "parts_sets"."id"
    LEFT JOIN "bricktracker_set_owners" AS "owner_parts"
        ON "parts_sets"."id" = "owner_parts"."id"
    WHERE "bricktracker_parts"."figure" IS NOT NULL
    GROUP BY
        "bricktracker_parts"."id",
        "bricktracker_parts"."figure"

    UNION ALL

    -- Individual minifigure parts
    SELECT
        "bricktracker_individual_minifigure_parts"."id",
        "bricktracker_individual_minifigures"."figure",
        {% if owner_id and owner_id != 'all' %}
        SUM(CASE WHEN "owner_individual"."owner_{{ owner_id }}" = 1 THEN "bricktracker_individual_minifigure_parts"."missing" ELSE 0 END) AS "total_missing",
        SUM(CASE WHEN "owner_individual"."owner_{{ owner_id }}" = 1 THEN "bricktracker_individual_minifigure_parts"."damaged" ELSE 0 END) AS "total_damaged"
        {% else %}
        SUM("bricktracker_individual_minifigure_parts"."missing") AS "total_missing",
        SUM("bricktracker_individual_minifigure_parts"."damaged") AS "total_damaged"
        {% endif %}
    FROM "bricktracker_individual_minifigure_parts"
    INNER JOIN "bricktracker_individual_minifigures"
        ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
    LEFT JOIN "bricktracker_individual_minifigure_owners" AS "owner_individual"
        ON "bricktracker_individual_minifigures"."id" = "owner_individual"."id"
    GROUP BY
        "bricktracker_individual_minifigure_parts"."id",
        "bricktracker_individual_minifigures"."figure"
) "problem_join"
    ON "combined"."id" = "problem_join"."id"
    AND "combined"."figure" = "problem_join"."figure"
{% endblock %}

{% block where %}
{% set conditions = [] %}
{% if owner_id and owner_id != 'all' %}
{% set _ = conditions.append('(("combined"."source_type" = \'set\' AND "set_owners"."owner_' ~ owner_id ~ '" = 1) OR ("combined"."source_type" = \'individual\' AND "individual_owners"."owner_' ~ owner_id ~ '" = 1))') %}
{% endif %}
{% if search_query %}
{# Escape single quotes so the search term cannot break out of the SQL
   string literal (injection hardening). Ideally this would be a bound
   parameter rather than template interpolation — kept as a template
   variable here to preserve the existing caller interface. #}
{% set safe_search = search_query | replace("'", "''") %}
{% set _ = conditions.append('(LOWER("combined"."name") LIKE LOWER(\'%' ~ safe_search ~ '%\'))') %}
{% endif %}
{% if conditions %}
WHERE {{ conditions | join(' AND ') }}
{% endif %}
{% endblock %}

{% block group %}
GROUP BY
    "combined"."figure"
{% endblock %}
|
||||
@@ -0,0 +1,59 @@
|
||||
{% extends 'minifigure/base/base.sql' %}

{# Minifigures (set-based or individual) that contain the given damaged
   part. Bound parameters: :part, :color. #}

{% block total_damaged %}
    -- IFNULL keeps the total at 0 rather than NULL when a joined row has
    -- no damaged value; matches the convention of the sibling templates.
    SUM(IFNULL("parts_combined"."damaged", 0)) AS "total_damaged",
{% endblock %}

{% block join %}
-- Parts from both set-based and individual minifigures, unified so the
-- damaged counter can be aggregated per figure.
LEFT JOIN (
    SELECT
        "bricktracker_parts"."id",
        "bricktracker_parts"."figure",
        "bricktracker_parts"."damaged"
    FROM "bricktracker_parts"

    UNION ALL

    SELECT
        "bricktracker_individual_minifigure_parts"."id",
        "bricktracker_individual_minifigures"."figure",
        "bricktracker_individual_minifigure_parts"."damaged"
    FROM "bricktracker_individual_minifigure_parts"
    INNER JOIN "bricktracker_individual_minifigures"
        ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
) AS "parts_combined"
    ON "combined"."id" IS NOT DISTINCT FROM "parts_combined"."id"
    AND "combined"."figure" IS NOT DISTINCT FROM "parts_combined"."figure"
{% endblock %}

{% block where %}
WHERE "combined"."figure" IN (
    -- Figures having the damaged part in either source.
    -- UNION already de-duplicates (and IN ignores duplicates anyway),
    -- so no extra derived table / GROUP BY wrapper is needed.
    SELECT "bricktracker_parts"."figure"
    FROM "bricktracker_parts"
    WHERE "bricktracker_parts"."part" IS NOT DISTINCT FROM :part
        AND "bricktracker_parts"."color" IS NOT DISTINCT FROM :color
        AND "bricktracker_parts"."figure" IS NOT NULL
        AND "bricktracker_parts"."damaged" > 0

    UNION

    SELECT "bricktracker_individual_minifigures"."figure"
    FROM "bricktracker_individual_minifigure_parts"
    INNER JOIN "bricktracker_individual_minifigures"
        ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
    WHERE "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
        AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
        AND "bricktracker_individual_minifigure_parts"."damaged" > 0
)
{% endblock %}

{% block group %}
GROUP BY
    "combined"."figure"
{% endblock %}
|
||||
@@ -1,6 +1,5 @@
|
||||
{% extends 'minifigure/base/select.sql' %}
|
||||
{% extends 'minifigure/base/base.sql' %}
|
||||
|
||||
{% block where %}
|
||||
WHERE "minifigures"."u_id" IS NOT DISTINCT FROM :u_id
|
||||
AND "minifigures"."set_num" IS NOT DISTINCT FROM :set_num
|
||||
WHERE "combined"."id" IS NOT DISTINCT FROM :id AND "combined"."source_type" = 'set'
|
||||
{% endblock %}
|
||||
|
||||
@@ -1,17 +1,40 @@
|
||||
{% extends 'minifigure/base/select.sql' %}
|
||||
{% extends 'minifigure/base/base.sql' %}
|
||||
|
||||
{% block total_missing %}
|
||||
SUM(IFNULL("missing"."quantity", 0)) AS "total_missing",
|
||||
SUM("parts_combined"."missing") AS "total_missing",
|
||||
{% endblock %}
|
||||
|
||||
{% block total_damaged %}
|
||||
SUM("parts_combined"."damaged") AS "total_damaged",
|
||||
{% endblock %}
|
||||
|
||||
{% block join %}
|
||||
LEFT JOIN "missing"
|
||||
ON "minifigures"."fig_num" IS NOT DISTINCT FROM "missing"."set_num"
|
||||
AND "minifigures"."u_id" IS NOT DISTINCT FROM "missing"."u_id"
|
||||
-- Join with parts from both set-based and individual minifigures
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
"bricktracker_parts"."id",
|
||||
"bricktracker_parts"."figure",
|
||||
"bricktracker_parts"."missing",
|
||||
"bricktracker_parts"."damaged"
|
||||
FROM "bricktracker_parts"
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigure_parts"."missing",
|
||||
"bricktracker_individual_minifigure_parts"."damaged"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures"
|
||||
ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
) AS "parts_combined"
|
||||
ON "combined"."id" IS NOT DISTINCT FROM "parts_combined"."id"
|
||||
AND "combined"."figure" IS NOT DISTINCT FROM "parts_combined"."figure"
|
||||
{% endblock %}
|
||||
|
||||
{% block group %}
|
||||
GROUP BY
|
||||
"minifigures"."fig_num",
|
||||
"minifigures"."u_id"
|
||||
"combined"."figure",
|
||||
"combined"."id"
|
||||
{% endblock %}
|
||||
|
||||
@@ -1,30 +1,59 @@
|
||||
{% extends 'minifigure/base/select.sql' %}
|
||||
{% extends 'minifigure/base/base.sql' %}
|
||||
|
||||
{% block total_missing %}
|
||||
SUM(IFNULL("missing"."quantity", 0)) AS "total_missing",
|
||||
SUM("parts_combined"."missing") AS "total_missing",
|
||||
{% endblock %}
|
||||
|
||||
{% block join %}
|
||||
LEFT JOIN "missing"
|
||||
ON "minifigures"."fig_num" IS NOT DISTINCT FROM "missing"."set_num"
|
||||
AND "minifigures"."u_id" IS NOT DISTINCT FROM "missing"."u_id"
|
||||
-- Join with parts from both set-based and individual minifigures
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
"bricktracker_parts"."id",
|
||||
"bricktracker_parts"."figure",
|
||||
"bricktracker_parts"."missing"
|
||||
FROM "bricktracker_parts"
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT
|
||||
"bricktracker_individual_minifigure_parts"."id",
|
||||
"bricktracker_individual_minifigures"."figure",
|
||||
"bricktracker_individual_minifigure_parts"."missing"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures"
|
||||
ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
) AS "parts_combined"
|
||||
ON "combined"."id" IS NOT DISTINCT FROM "parts_combined"."id"
|
||||
AND "combined"."figure" IS NOT DISTINCT FROM "parts_combined"."figure"
|
||||
{% endblock %}
|
||||
|
||||
{% block where %}
|
||||
WHERE "minifigures"."fig_num" IN (
|
||||
SELECT
|
||||
"missing"."set_num"
|
||||
FROM "missing"
|
||||
WHERE "combined"."figure" IN (
|
||||
-- Find figures with missing parts from both sources
|
||||
SELECT "figure"
|
||||
FROM (
|
||||
SELECT "bricktracker_parts"."figure"
|
||||
FROM "bricktracker_parts"
|
||||
WHERE "bricktracker_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_parts"."figure" IS NOT NULL
|
||||
AND "bricktracker_parts"."missing" > 0
|
||||
|
||||
WHERE "missing"."color_id" IS NOT DISTINCT FROM :color_id
|
||||
AND "missing"."element_id" IS NOT DISTINCT FROM :element_id
|
||||
AND "missing"."part_num" IS NOT DISTINCT FROM :part_num
|
||||
UNION
|
||||
|
||||
GROUP BY "missing"."set_num"
|
||||
SELECT "bricktracker_individual_minifigures"."figure"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures"
|
||||
ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
WHERE "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_individual_minifigure_parts"."missing" > 0
|
||||
) AS "missing_figures"
|
||||
GROUP BY "figure"
|
||||
)
|
||||
{% endblock %}
|
||||
|
||||
{% block group %}
|
||||
GROUP BY
|
||||
"minifigures"."fig_num"
|
||||
"combined"."figure"
|
||||
{% endblock %}
|
||||
|
||||
@@ -1,24 +1,34 @@
|
||||
{% extends 'minifigure/base/select.sql' %}
|
||||
{% extends 'minifigure/base/base.sql' %}
|
||||
|
||||
{% block total_quantity %}
|
||||
SUM("minifigures"."quantity") AS "total_quantity",
|
||||
SUM("combined"."quantity") AS "total_quantity",
|
||||
{% endblock %}
|
||||
|
||||
{% block where %}
|
||||
WHERE "minifigures"."fig_num" IN (
|
||||
SELECT
|
||||
"inventory"."set_num"
|
||||
FROM "inventory"
|
||||
WHERE "combined"."figure" IN (
|
||||
-- Find figures from both set-based and individual minifigure parts
|
||||
SELECT "figure"
|
||||
FROM (
|
||||
SELECT "bricktracker_parts"."figure"
|
||||
FROM "bricktracker_parts"
|
||||
WHERE "bricktracker_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_parts"."color" IS NOT DISTINCT FROM :color
|
||||
AND "bricktracker_parts"."figure" IS NOT NULL
|
||||
|
||||
WHERE "inventory"."color_id" IS NOT DISTINCT FROM :color_id
|
||||
AND "inventory"."element_id" IS NOT DISTINCT FROM :element_id
|
||||
AND "inventory"."part_num" IS NOT DISTINCT FROM :part_num
|
||||
UNION
|
||||
|
||||
GROUP BY "inventory"."set_num"
|
||||
SELECT "bricktracker_individual_minifigures"."figure"
|
||||
FROM "bricktracker_individual_minifigure_parts"
|
||||
INNER JOIN "bricktracker_individual_minifigures"
|
||||
ON "bricktracker_individual_minifigure_parts"."id" = "bricktracker_individual_minifigures"."id"
|
||||
WHERE "bricktracker_individual_minifigure_parts"."part" IS NOT DISTINCT FROM :part
|
||||
AND "bricktracker_individual_minifigure_parts"."color" IS NOT DISTINCT FROM :color
|
||||
) AS "parts_figures"
|
||||
GROUP BY "figure"
|
||||
)
|
||||
{% endblock %}
|
||||
|
||||
{% block group %}
|
||||
GROUP BY
|
||||
"minifigures"."fig_num"
|
||||
"combined"."figure"
|
||||
{% endblock %}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user