diff --git a/.github/validate_customizations.py b/.github/validate_customizations.py deleted file mode 100644 index 89cbf720..00000000 --- a/.github/validate_customizations.py +++ /dev/null @@ -1,169 +0,0 @@ -import json -import pathlib -import sys - - -def scrub(txt: str) -> str: - return txt.replace(" ", "_").replace("-", "_").lower() - - -def unscrub(txt: str) -> str: - return txt.replace("_", " ").replace("-", " ").title() - - -def get_customized_doctypes(): - apps_dir = pathlib.Path(__file__).resolve().parent.parent.parent - apps_order = pathlib.Path(__file__).resolve().parent.parent.parent.parent / "sites" / "apps.txt" - apps_order = apps_order.read_text().split("\n") - customized_doctypes = {} - for _app_dir in apps_order: - app_dir = (apps_dir / _app_dir).resolve() - if not app_dir.is_dir(): - continue - modules = (app_dir / _app_dir / "modules.txt").read_text().split("\n") - for module in modules: - if not (app_dir / _app_dir / scrub(module) / "custom").exists(): - continue - for custom_file in list((app_dir / _app_dir / scrub(module) / "custom").glob("**/*.json")): - if custom_file.stem in customized_doctypes: - customized_doctypes[custom_file.stem].append(custom_file.resolve()) - else: - customized_doctypes[custom_file.stem] = [custom_file.resolve()] - - return dict(sorted(customized_doctypes.items())) - - -def validate_module(customized_doctypes, set_module=False): - exceptions = [] - app_dir = pathlib.Path(__file__).resolve().parent.parent - this_app = app_dir.stem - modules = (app_dir / this_app / "modules.txt").read_text().split("\n") - for doctype, customize_files in customized_doctypes.items(): - for customize_file in customize_files: - if ( - not this_app == customize_file.parent.parent.parent.parent.stem - ): # Updated to accommodate local folders named same as app - continue - module = customize_file.parent.parent.stem - file_contents = json.loads(customize_file.read_text()) - if file_contents.get("custom_fields"): - for custom_field in 
file_contents.get("custom_fields"): - if set_module: - custom_field["module"] = unscrub(module) - continue - if not custom_field.get("module"): - exceptions.append( - f"Custom Field for {custom_field.get('dt')} in {this_app} '{custom_field.get('fieldname')}' does not have a module key" - ) - continue - elif custom_field.get("module") not in modules: - exceptions.append( - f"Custom Field for {custom_field.get('dt')} in {this_app} '{custom_field.get('fieldname')}' has module key ({custom_field.get('module')}) associated with another app" - ) - continue - if file_contents.get("property_setters"): - for ps in file_contents.get("property_setters"): - if set_module: - ps["module"] = unscrub(module) - continue - if not ps.get("module"): - exceptions.append( - f"Property Setter for {ps.get('doc_type')} in {this_app} '{ps.get('property')}' on {ps.get('field_name')} does not have a module key" - ) - continue - elif ps.get("module") not in modules: - exceptions.append( - f"Property Setter for {ps.get('doc_type')} in {this_app} '{ps.get('property')}' on {ps.get('field_name')} has module key ({ps.get('module')}) associated with another app" - ) - continue - if set_module: - with customize_file.open("w", encoding="UTF-8") as target: - json.dump(file_contents, target, sort_keys=True, indent=2) - - return exceptions - - -def validate_no_custom_perms(customized_doctypes): - exceptions = [] - this_app = pathlib.Path(__file__).resolve().parent.parent.stem - for doctype, customize_files in customized_doctypes.items(): - for customize_file in customize_files: - if ( - not this_app == customize_file.parent.parent.parent.parent.stem - ): # Updated to accommodate local folders named same as app - continue - file_contents = json.loads(customize_file.read_text()) - if file_contents.get("custom_perms"): - exceptions.append(f"Customization for {doctype} in {this_app} contains custom permissions") - return exceptions - - -def validate_duplicate_customizations(customized_doctypes): - exceptions 
= [] - common_fields = {} - common_property_setters = {} - app_dir = pathlib.Path(__file__).resolve().parent.parent - this_app = app_dir.stem - for doctype, customize_files in customized_doctypes.items(): - if len(customize_files) == 1: - continue - common_fields[doctype] = {} - common_property_setters[doctype] = {} - for customize_file in customize_files: - module = customize_file.parent.parent.stem - app = customize_file.parent.parent.parent.parent.stem - file_contents = json.loads(customize_file.read_text()) - if file_contents.get("custom_fields"): - fields = [cf.get("fieldname") for cf in file_contents.get("custom_fields")] - common_fields[doctype][module] = fields - if file_contents.get("property_setters"): - ps = [ps.get("name") for ps in file_contents.get("property_setters")] - common_property_setters[doctype][module] = ps - - for doctype, module_and_fields in common_fields.items(): - if this_app not in module_and_fields.keys(): - continue - this_modules_fields = module_and_fields.pop(this_app) - for module, fields in module_and_fields.items(): - for field in fields: - if field in this_modules_fields: - exceptions.append( - f"Custom Field for {unscrub(doctype)} in {this_app} '{field}' also appears in customizations for {module}" - ) - - for doctype, module_and_ps in common_property_setters.items(): - if this_app not in module_and_ps.keys(): - continue - this_modules_ps = module_and_ps.pop(this_app) - for module, ps in module_and_ps.items(): - for p in ps: - if p in this_modules_ps: - exceptions.append( - f"Property Setter for {unscrub(doctype)} in {this_app} on '{p}' also appears in customizations for {module}" - ) - - return exceptions - - -def validate_customizations(set_module): - customized_doctypes = get_customized_doctypes() - exceptions = validate_no_custom_perms(customized_doctypes) - exceptions += validate_module(customized_doctypes, set_module) - exceptions += validate_duplicate_customizations(customized_doctypes) - - return exceptions - - -if 
__name__ == "__main__": - exceptions = [] - set_module = False - for arg in sys.argv: - if arg == "--set-module": - set_module = True - exceptions.append(validate_customizations(set_module)) - - if exceptions: - for exception in exceptions: - [print(e) for e in exception] # TODO: colorize - - sys.exit(1) if all(exceptions) else sys.exit(0) diff --git a/.github/workflows/code-duplication.yml b/.github/workflows/code-duplication.yml new file mode 100644 index 00000000..d70489ee --- /dev/null +++ b/.github/workflows/code-duplication.yml @@ -0,0 +1,29 @@ +name: Code Duplication + +on: + push: + branches: ["*"] + pull_request: + branches: ["*"] + +permissions: + contents: read + pull-requests: write + issues: write + +jobs: + duplication: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-node@v4 + with: + node-version: '18' + + - uses: agritheory/test_utils/actions/code_duplication@main + with: + max_clones: 60 + max_percentage: 5.0 diff --git a/.github/workflows/generate-changelog.yml b/.github/workflows/generate-changelog.yml new file mode 100644 index 00000000..99870a15 --- /dev/null +++ b/.github/workflows/generate-changelog.yml @@ -0,0 +1,23 @@ +name: Generate Changelog + +on: + pull_request: + types: [opened, reopened, synchronize] + issue_comment: + types: [created] + +jobs: + generate-changelog: + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Generate Changelog + uses: agritheory/test_utils/actions/generate_changelog@main + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + anthropic-api-key: ${{ secrets.ANTHROPIC_API_KEY }} diff --git a/.github/workflows/overrides.yml b/.github/workflows/overrides.yml new file mode 100644 index 00000000..dd59d0ef --- /dev/null +++ b/.github/workflows/overrides.yml @@ -0,0 +1,20 @@ +name: Track Overrides + +on: + pull_request: + branches: + - version-14 + - version-15 + +jobs: + 
track_overrides: + runs-on: ubuntu-latest + name: Track Overrides + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Track Overrides + uses: agritheory/test_utils/actions/track_overrides@main + with: + app: beam diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index de282cf5..32e6881e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,14 +11,30 @@ repos: exclude: '.*json$|.*txt$|.*csv|.*md|.*svg' - id: check-yaml - id: no-commit-to-branch - args: ['--branch', 'develop'] + args: ['--branch', 'version-15', '--branch', 'version-16'] - id: check-merge-conflict - id: check-ast - id: check-json - id: check-toml - - id: check-yaml - id: debug-statements + - repo: https://github.com/codespell-project/codespell + rev: v2.4.1 + hooks: + - id: codespell + args: ["--ignore-words-list", "notin"] + exclude: 'yarn.lock|poetry.lock' + additional_dependencies: + - tomli + + - repo: local + hooks: + - id: no-titled-beam + name: "Use 'BEAM' not 'Beam'" + language: pygrep + entry: '\bBeam\b' + files: '\.md$' + - repo: https://github.com/asottile/pyupgrade rev: v3.20.0 hooks: @@ -30,47 +46,43 @@ repos: hooks: - id: black - - repo: https://github.com/PyCQA/autoflake - rev: v2.3.1 - hooks: - - id: autoflake - args: [--remove-all-unused-imports, --in-place] - - - repo: https://github.com/PyCQA/isort - rev: 6.0.1 - hooks: - - id: isort - - repo: https://github.com/PyCQA/flake8 - rev: 7.3.0 + rev: 7.2.0 hooks: - id: flake8 additional_dependencies: ['flake8-bugbear'] - - repo: https://github.com/codespell-project/codespell - rev: v2.4.1 - hooks: - - id: codespell - additional_dependencies: - - tomli - - repo: https://github.com/agritheory/test_utils - rev: v1.0.0 + rev: v1.20.1 hooks: - id: update_pre_commit_config + - id: validate_frappe_project - id: validate_copyright files: '\.(js|ts|py|md)$' args: ['--app', 'beam'] + - id: bylines + exclude: 'README.md|CHANGELOG.md' - id: clean_customized_doctypes args: ['--app', 'beam'] - 
id: validate_customizations + - id: validate_patches + args: ['--app', 'beam'] + - id: track_overrides + args: ['--directory', '.', '--app', 'beam', '--base-branch', 'version-15'] + - id: check_code_duplication + args: ['--max-clones', '60', '--max-percentage', '5.0'] - - repo: local + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.1.0 hooks: - id: prettier - name: prettier - entry: npx prettier . --write --ignore-path .prettierignore - language: node + types_or: [javascript, vue, scss] + exclude: | + (?x)^( + .*node_modules.*| + beam/public/dist/.*| + beam/public/js/lib/.* + )$ ci: autoupdate_schedule: weekly diff --git a/CHANGELOG.md b/CHANGELOG.md index 903e94f2..cc9e6965 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ - # CHANGELOG diff --git a/beam/__init__.py b/beam/__init__.py index 1f0539c1..51b884c0 100644 --- a/beam/__init__.py +++ b/beam/__init__.py @@ -1,4 +1,4 @@ # Copyright (c) 2025, AgriTheory and contributors # For license information, please see license.txt -__version__ = "15.4.0" +__version__ = "15.8.0" diff --git a/beam/beam/barcodes.py b/beam/beam/barcodes.py index 1fb52d9c..3944a439 100644 --- a/beam/beam/barcodes.py +++ b/beam/beam/barcodes.py @@ -6,6 +6,7 @@ from io import BytesIO import frappe +import pyqrcode from barcode import Code128 from barcode.writer import ImageWriter from erpnext import get_default_company @@ -18,6 +19,24 @@ def create_beam_barcode(doc, method=None): if doc.doctype == "Item" and doc.is_stock_item == 0: return + if ( + doc.get("item_group") + and doc.doctype == "Item" + and frappe.db.exists("Item Group", "Products") + and doc.item_group + in frappe.get_all("Item Group", {"name": ("descendants of", "Products")}, pluck="name") + ): + # TODO: refactor this to be configurable to "Products" or "sold" items that do not require handling units + return + company = get_default_company() + if frappe.db.exists("BEAM Settings", {"company": company}): + settings = frappe.get_cached_doc("BEAM 
Settings", {"company": company}) + try: + allowed = frappe.parse_json(settings.auto_barcode_doctypes or '["Item", "Warehouse"]') + except Exception: + allowed = ["Item", "Warehouse"] + if doc.doctype not in allowed: + return if any([b for b in doc.barcodes if b.barcode_type == "Code128"]): return if doc.doctype == "User" and (doc.name == "Guest" or doc.name == "Administrator"): @@ -55,6 +74,37 @@ def barcode128(barcode_text: str) -> str: return f'' +@frappe.whitelist() +@frappe.read_only() +def get_qr_code(qr_text: str) -> str: + if not qr_text: + return "" + + company = get_default_company() + settings = ( + create_beam_settings(company) + if not frappe.db.exists("BEAM Settings", {"company": company}) + else frappe.get_doc("BEAM Settings", {"company": company}) + ) + + qr_scale = getattr(settings, "qr_scale", 8) # Module size in pixels + qr_border = getattr(settings, "qr_border", 4) # Border size in modules + qr_error_correct = getattr(settings, "qr_error_correct", "M") # Error correction level + + qr = pyqrcode.create(qr_text, error=qr_error_correct) + temp = BytesIO() + qr.png( + temp, + scale=int(qr_scale), + module_color=(0, 0, 0, 255), + background=(255, 255, 255, 255), + quiet_zone=int(qr_border), + ) + temp.seek(0) + encoded = base64.b64encode(temp.getvalue()).decode("ascii") + return f'' + + @frappe.whitelist() @frappe.read_only() def formatted_zpl_barcode(barcode_text: str) -> str: @@ -123,7 +173,12 @@ def add_to_label(label: Label, element: Printable): class ZPLLabelStringOutput(Label): def __init__( - self, width: int = 100, length: int = 100, dpi: int = 203, print_speed: int = 2, copies: int = 1 + self, + width: int = 100, + length: int = 100, + dpi: int = 203, + print_speed: int = 2, + copies: int = 1, ): super().__init__(width, length, dpi, print_speed, copies) diff --git a/beam/beam/boot.py b/beam/beam/boot.py index 11c51603..968cd6da 100644 --- a/beam/beam/boot.py +++ b/beam/beam/boot.py @@ -8,11 +8,24 @@ def boot_session(bootinfo): bootinfo.beam = 
get_scan_doctypes() - bootinfo.enabled_beam_settings = frappe.get_all( + bootinfo.beam["settings"] = get_beam_settings() + bootinfo.beam["default_hu_print_format"] = frappe.get_meta("Handling Unit").get( + "default_print_format" + ) + + +def get_beam_settings(): + """Get BEAM Settings for all companies, keyed by company name.""" + settings = {} + beam_settings = frappe.get_all( "BEAM Settings", - filters={"enable_demand": True}, - pluck="name", + fields=["company", "enable_handling_units"], ) + for setting in beam_settings: + settings[setting.company] = { + "enable_handling_units": setting.enable_handling_units, + } + return settings def redirect_to_beam(): diff --git a/beam/beam/custom/bom_scrap_item.json b/beam/beam/custom/bom_scrap_item.json index 95230daf..4037dd3e 100644 --- a/beam/beam/custom/bom_scrap_item.json +++ b/beam/beam/custom/bom_scrap_item.json @@ -6,9 +6,7 @@ "bold": 0, "collapsible": 0, "columns": 0, - "creation": "2023-08-22 15:23:16.272692", "default": null, - "docstatus": 0, "dt": "BOM Scrap Item", "fetch_if_empty": 0, "fieldname": "create_handling_unit", @@ -29,15 +27,11 @@ "is_virtual": 0, "label": "Create Handling Unit", "length": 0, - "modified": "2023-08-22 15:23:52.267428", - "modified_by": "Administrator", "module": "BEAM", "name": "BOM Scrap Item-create_handling_unit", "no_copy": 0, "non_negative": 0, - "owner": "Administrator", "permlevel": 0, - "precision": "", "print_hide": 0, "print_hide_if_no_value": 0, "read_only": 0, @@ -49,9 +43,7 @@ "unique": 0 } ], - "custom_perms": [], "doctype": "BOM Scrap Item", - "links": [], "property_setters": [], "sync_on_migrate": 1 } diff --git a/beam/beam/custom/item.json b/beam/beam/custom/item.json index ee256d60..a342fc85 100644 --- a/beam/beam/custom/item.json +++ b/beam/beam/custom/item.json @@ -6,9 +6,7 @@ "bold": 0, "collapsible": 0, "columns": 0, - "creation": "2024-02-26 23:52:53.051024", - "default": "1", - "docstatus": 0, + "default": "0", "dt": "Item", "fetch_if_empty": 0, "fieldname": 
"enable_handling_unit", @@ -29,15 +27,11 @@ "is_virtual": 0, "label": "Enable Handling Unit", "length": 0, - "modified": "2024-02-26 23:52:53.051024", - "modified_by": "Administrator", "module": "BEAM", "name": "Item-enable_handling_unit", "no_copy": 0, "non_negative": 0, - "owner": "Administrator", "permlevel": 0, - "precision": "", "print_hide": 0, "print_hide_if_no_value": 0, "read_only": 0, @@ -49,9 +43,7 @@ "unique": 0 } ], - "custom_perms": [], "doctype": "Item", - "links": [], "property_setters": [], "sync_on_migrate": 1 } diff --git a/beam/beam/custom/item_barcode.json b/beam/beam/custom/item_barcode.json index 2e6bb577..55b70641 100644 --- a/beam/beam/custom/item_barcode.json +++ b/beam/beam/custom/item_barcode.json @@ -1,20 +1,14 @@ { "custom_fields": [], - "custom_perms": [], "doctype": "Item Barcode", "property_setters": [ { - "creation": "2022-06-16 09:40:22.875922", "doc_type": "Item Barcode", - "docstatus": 0, "doctype_or_field": "DocField", "field_name": "barcode_type", "idx": 0, - "modified": "2022-06-16 09:40:22.875922", - "modified_by": "Administrator", "module": "BEAM", "name": "Item Barcode-barcode_type-options", - "owner": "Administrator", "property": "options", "property_type": "Text", "value": "\nEAN\nUPC-A\nCode128" diff --git a/beam/beam/custom/network_printer_settings.json b/beam/beam/custom/network_printer_settings.json new file mode 100644 index 00000000..aa230540 --- /dev/null +++ b/beam/beam/custom/network_printer_settings.json @@ -0,0 +1,98 @@ +{ + "custom_fields": [ + { + "allow_in_quick_entry": 0, + "allow_on_submit": 0, + "bold": 0, + "collapsible": 0, + "columns": 0, + "dt": "Network Printer Settings", + "fetch_if_empty": 0, + "fieldname": "printer_type", + "fieldtype": "Select", + "hidden": 0, + "hide_border": 0, + "hide_days": 0, + "hide_seconds": 0, + "ignore_user_permissions": 0, + "ignore_xss_filter": 0, + "in_global_search": 0, + "in_list_view": 0, + "in_preview": 0, + "in_standard_filter": 0, + "insert_after": 
"printer_name", + "is_system_generated": 0, + "is_virtual": 0, + "label": "Printer Type", + "length": 0, + "module": "BEAM", + "name": "Network Printer Settings-printer_type", + "no_copy": 0, + "options": "\nGeneral Purpose\nLabel / RAW", + "permlevel": 0, + "print_hide": 0, + "print_hide_if_no_value": 0, + "read_only": 0, + "report_hide": 0, + "reqd": 0, + "search_index": 0, + "sort_options": 0, + "translatable": 0, + "unique": 0 + }, + { + "allow_in_quick_entry": 0, + "allow_on_submit": 0, + "bold": 0, + "collapsible": 0, + "columns": 0, + "dt": "Network Printer Settings", + "fetch_if_empty": 0, + "fieldname": "printer_location", + "fieldtype": "Data", + "hidden": 0, + "hide_border": 0, + "hide_days": 0, + "hide_seconds": 0, + "ignore_user_permissions": 0, + "ignore_xss_filter": 0, + "in_global_search": 0, + "in_list_view": 0, + "in_preview": 0, + "in_standard_filter": 0, + "insert_after": "printer_type", + "is_system_generated": 0, + "is_virtual": 0, + "label": "Printer Location", + "length": 0, + "module": "BEAM", + "name": "Network Printer Settings-printer_location", + "no_copy": 0, + "permlevel": 0, + "print_hide": 0, + "print_hide_if_no_value": 0, + "read_only": 0, + "report_hide": 0, + "reqd": 0, + "search_index": 0, + "sort_options": 0, + "translatable": 0, + "unique": 0 + } + ], + "property_setters": [ + { + "doc_type": "Network Printer Settings", + "doctype_or_field": "DocField", + "field_name": "printer_name", + "idx": 0, + "module": "BEAM", + "name": "Network Printer Settings-printer_name-fieldtype", + "property": "fieldtype", + "property_type": "Data", + "value": "Autocomplete" + } + ], + "doctype": "Network Printer Settings", + "sync_on_migrate": 1 +} diff --git a/beam/beam/custom/stock_entry_detail.json b/beam/beam/custom/stock_entry_detail.json index 6fc6385b..1728d497 100644 --- a/beam/beam/custom/stock_entry_detail.json +++ b/beam/beam/custom/stock_entry_detail.json @@ -6,10 +6,7 @@ "bold": 0, "collapsible": 0, "columns": 0, - "creation": 
"2023-09-13 12:51:04.950175", "default": null, - "depends_on": "", - "docstatus": 0, "dt": "Stock Entry Detail", "fetch_if_empty": 0, "fieldname": "recombine_on_cancel", @@ -30,15 +27,11 @@ "is_virtual": 0, "label": "Recombine On Cancel", "length": 0, - "modified": "2023-09-13 12:51:04.950175", - "modified_by": "Administrator", "module": "BEAM", "name": "Stock Entry Detail-recombine_on_cancel", "no_copy": 1, "non_negative": 0, - "owner": "Administrator", "permlevel": 0, - "precision": "", "print_hide": 0, "print_hide_if_no_value": 0, "read_only": 1, @@ -50,9 +43,7 @@ "unique": 0 } ], - "custom_perms": [], "doctype": "Stock Entry Detail", - "links": [], "property_setters": [], "sync_on_migrate": 1 } diff --git a/beam/beam/custom/user.json b/beam/beam/custom/user.json index e1d92164..c7efa884 100644 --- a/beam/beam/custom/user.json +++ b/beam/beam/custom/user.json @@ -1,99 +1,85 @@ { - "custom_fields": [ - { - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "creation": "2022-06-16 09:48:36.521275", - "default": null, - "docstatus": 0, - "dt": "User", - "fetch_if_empty": 0, - "fieldname": "barcode_section", - "fieldtype": "Section Break", - "hidden": 0, - "hide_border": 0, - "hide_days": 0, - "hide_seconds": 0, - "idx": 26, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_preview": 0, - "in_standard_filter": 0, - "insert_after": "mobile_no", - "label": "Barcodes", - "length": 0, - "modified": "2022-06-16 09:48:36.521275", - "modified_by": "Administrator", - "module": "BEAM", - "name": "User-barcode_section", - "no_copy": 0, - "non_negative": 0, - "owner": "Administrator", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, 
- "columns": 0, - "creation": "2022-06-16 09:48:36.701251", - "default": null, - "docstatus": 0, - "dt": "User", - "fetch_if_empty": 0, - "fieldname": "barcodes", - "fieldtype": "Table", - "hidden": 0, - "hide_border": 0, - "hide_days": 0, - "hide_seconds": 0, - "idx": 27, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_preview": 0, - "in_standard_filter": 0, - "insert_after": "barcode_section", - "label": "", - "length": 0, - "modified": "2022-06-16 09:48:36.701251", - "modified_by": "Administrator", - "module": "BEAM", - "name": "User-barcodes", - "no_copy": 0, - "non_negative": 0, - "options": "Item Barcode", - "owner": "Administrator", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "translatable": 0, - "unique": 0 - } - ], - "custom_perms": [], - "doctype": "User", - "property_setters": [], - "sync_on_migrate": 1 -} + "custom_fields": [ + { + "allow_in_quick_entry": 0, + "allow_on_submit": 0, + "bold": 0, + "collapsible": 0, + "columns": 0, + "default": null, + "dt": "User", + "fetch_if_empty": 0, + "fieldname": "barcode_section", + "fieldtype": "Section Break", + "hidden": 0, + "hide_border": 0, + "hide_days": 0, + "hide_seconds": 0, + "idx": 26, + "ignore_user_permissions": 0, + "ignore_xss_filter": 0, + "in_global_search": 0, + "in_list_view": 0, + "in_preview": 0, + "in_standard_filter": 0, + "insert_after": "mobile_no", + "label": "Barcodes", + "length": 0, + "module": "BEAM", + "name": "User-barcode_section", + "no_copy": 0, + "non_negative": 0, + "permlevel": 0, + "print_hide": 0, + "print_hide_if_no_value": 0, + "read_only": 0, + "report_hide": 0, + "reqd": 0, + "search_index": 0, + "translatable": 0, + "unique": 0 + }, + { + "allow_in_quick_entry": 0, + "allow_on_submit": 0, + "bold": 0, + "collapsible": 0, + "columns": 0, + "default": null, + "dt": "User", + "fetch_if_empty": 0, 
+ "fieldname": "barcodes", + "fieldtype": "Table", + "hidden": 0, + "hide_border": 0, + "hide_days": 0, + "hide_seconds": 0, + "idx": 27, + "ignore_user_permissions": 0, + "ignore_xss_filter": 0, + "in_global_search": 0, + "in_list_view": 0, + "in_preview": 0, + "in_standard_filter": 0, + "insert_after": "barcode_section", + "length": 0, + "module": "BEAM", + "name": "User-barcodes", + "no_copy": 0, + "non_negative": 0, + "options": "Item Barcode", + "permlevel": 0, + "print_hide": 0, + "print_hide_if_no_value": 0, + "read_only": 0, + "report_hide": 0, + "reqd": 0, + "search_index": 0, + "translatable": 0, + "unique": 0 + } + ], + "doctype": "User", + "property_setters": [], + "sync_on_migrate": 1 +} \ No newline at end of file diff --git a/beam/beam/custom/warehouse.json b/beam/beam/custom/warehouse.json index 8861a08d..79dafeae 100644 --- a/beam/beam/custom/warehouse.json +++ b/beam/beam/custom/warehouse.json @@ -6,9 +6,7 @@ "bold": 0, "collapsible": 0, "columns": 0, - "creation": "2022-06-16 09:48:36.521275", "default": null, - "docstatus": 0, "dt": "Warehouse", "fetch_if_empty": 0, "fieldname": "barcode_section", @@ -27,15 +25,11 @@ "insert_after": "pin", "label": "Barcodes", "length": 0, - "modified": "2022-06-16 09:48:36.521275", - "modified_by": "Administrator", "module": "BEAM", "name": "Warehouse-barcode_section", "no_copy": 0, "non_negative": 0, - "owner": "Administrator", "permlevel": 0, - "precision": "", "print_hide": 0, "print_hide_if_no_value": 0, "read_only": 0, @@ -51,9 +45,7 @@ "bold": 0, "collapsible": 0, "columns": 0, - "creation": "2022-06-16 09:48:36.701251", "default": null, - "docstatus": 0, "dt": "Warehouse", "fetch_if_empty": 0, "fieldname": "barcodes", @@ -70,18 +62,13 @@ "in_preview": 0, "in_standard_filter": 0, "insert_after": "barcode_section", - "label": "", "length": 0, - "modified": "2022-06-16 09:48:36.701251", - "modified_by": "Administrator", "module": "BEAM", "name": "Warehouse-barcodes", "no_copy": 0, "non_negative": 0, 
"options": "Item Barcode", - "owner": "Administrator", "permlevel": 0, - "precision": "", "print_hide": 0, "print_hide_if_no_value": 0, "read_only": 0, @@ -92,7 +79,6 @@ "unique": 0 } ], - "custom_perms": [], "doctype": "Warehouse", "property_setters": [], "sync_on_migrate": 1 diff --git a/beam/beam/doctype/beam_settings/beam_settings.js b/beam/beam/doctype/beam_settings/beam_settings.js index 6899c00c..8fc5be36 100644 --- a/beam/beam/doctype/beam_settings/beam_settings.js +++ b/beam/beam/doctype/beam_settings/beam_settings.js @@ -1,14 +1,80 @@ // Copyright (c) 2024, AgriTheory and contributors // For license information, please see license.txt -frappe.ui.form.on('BEAM Settings', { - onload_post_render: frm => { +frappe.ui.form.on('BEAM Mobile Route', { + routes_add: frm => { frm.fields_dict.routes.grid.update_docfield_property('component', 'options', frm.doc.__onload.components) }, }) +frappe.dom.set_style(` + .barcode-auto-generate-editor input[type="checkbox"]:not(:checked) + .label-area { + text-decoration: line-through; + color: var(--text-muted); + } +`) -frappe.ui.form.on('BEAM Mobile Route', { - routes_add: frm => { +frappe.ui.form.on('BEAM Settings', { + refresh(frm) { + const wrapper = $(frm.fields_dict.barcode_exclusions_html.wrapper) + wrapper.empty() + wrapper.addClass('barcode-auto-generate-editor').css({ + border: '1px solid var(--border-color)', + borderRadius: 'var(--border-radius)', + padding: 'var(--padding-md)', + }) + frm.barcode_exclusions_editor = new BEAMBarcodeAutoGenerateEditor(wrapper, frm) + }, + onload_post_render: frm => { frm.fields_dict.routes.grid.update_docfield_property('component', 'options', frm.doc.__onload.components) }, }) + +class BEAMBarcodeAutoGenerateEditor { + constructor(wrapper, frm) { + this.wrapper = wrapper + this.frm = frm + this.setup() + } + + get allowed() { + try { + return JSON.parse(this.frm.doc.auto_barcode_doctypes || '["Item", "Warehouse"]') + } catch { + return ['Item', 'Warehouse'] + } + } + + setup() { 
+ this.multicheck = frappe.ui.form.make_control({ + parent: this.wrapper, + df: { + fieldname: 'auto_barcode_doctypes', + fieldtype: 'MultiCheck', + select_all: true, + columns: '15rem', + get_data: () => { + return frappe + .xcall('beam.beam.doctype.beam_settings.beam_settings.get_doctypes_with_item_barcodes') + .then(doctypes => { + const allowed = this.allowed + return doctypes.map(dt => ({ + label: __(dt), + value: dt, + checked: allowed.includes(dt), + })) + }) + }, + on_change: () => { + this.sync_json() + this.frm.dirty() + }, + }, + render_input: true, + }) + } + + sync_json() { + const checked = this.multicheck.get_checked_options() + frappe.model.set_value(this.frm.doctype, this.frm.docname, 'auto_barcode_doctypes', JSON.stringify(checked)) + } +} diff --git a/beam/beam/doctype/beam_settings/beam_settings.json b/beam/beam/doctype/beam_settings/beam_settings.json index bbeb2eba..c5662dac 100644 --- a/beam/beam/doctype/beam_settings/beam_settings.json +++ b/beam/beam/doctype/beam_settings/beam_settings.json @@ -1,6 +1,5 @@ { "actions": [], - "allow_rename": 1, "autoname": "field:company", "creation": "2024-03-18 17:06:58.552900", "doctype": "DocType", @@ -10,10 +9,19 @@ "company", "barcode_font_size", "enable_handling_units", + "scan_serial_no", "ignore_drop_shipped_items", "column_break_twrc", "receiving_workstation", "shipping_workstation", + "column_break_vhpb", + "qr_scale", + "qr_border", + "qr_error_correct", + "barcode_generation_section", + "barcode_exclusions_html", + "column_break_barcode", + "auto_barcode_doctypes", "demand_tab", "enable_demand", "warehouse_types", @@ -26,16 +34,6 @@ "show_scan_output" ], "fields": [ - { - "fieldname": "company", - "fieldtype": "Link", - "in_list_view": 1, - "label": "Company", - "options": "Company", - "reqd": 1, - "set_only_once": 1, - "unique": 1 - }, { "default": "1", "fieldname": "enable_handling_units", @@ -132,25 +130,81 @@ "fieldname": "show_scan_output", "fieldtype": "Check", "label": "Show Scan Output 
in Mobile View" + }, + { + "default": "0", + "fieldname": "scan_serial_no", + "fieldtype": "Check", + "label": "Enable Scanning of Serial Numbers" + }, + { + "fieldname": "company", + "fieldtype": "Link", + "label": "Company", + "options": "Company", + "unique": 1 + }, + { + "fieldname": "column_break_vhpb", + "fieldtype": "Column Break" + }, + { + "default": "8", + "fieldname": "qr_scale", + "fieldtype": "Int", + "label": "QR Scale", + "non_negative": 1 + }, + { + "default": "4", + "fieldname": "qr_border", + "fieldtype": "Int", + "label": "QR Border", + "non_negative": 1 + }, + { + "default": "M", + "fieldname": "qr_error_correct", + "fieldtype": "Select", + "label": "QR Error Correct", + "options": "L\nM\nQ\nH" + }, + { + "fieldname": "barcode_generation_section", + "fieldtype": "Section Break", + "label": "Barcode Generation" + }, + { + "fieldname": "barcode_exclusions_html", + "fieldtype": "HTML", + "label": "Disable Auto-Generation For" + }, + { + "fieldname": "column_break_barcode", + "fieldtype": "Column Break" + }, + { + "default": "[\"Item\", \"Warehouse\"]", + "fieldname": "auto_barcode_doctypes", + "fieldtype": "JSON", + "hidden": 1, + "label": "Auto Barcode Doctypes" } ], - "index_web_pages_for_search": 1, "links": [], - "modified": "2025-05-23 10:43:46.152483", + "modified": "2026-01-02 20:34:56.013615", "modified_by": "Administrator", "module": "BEAM", "name": "BEAM Settings", - "naming_rule": "Expression (old style)", + "naming_rule": "By fieldname", "owner": "Administrator", "permissions": [ { "create": 1, "delete": 1, "email": 1, - "export": 1, "print": 1, "read": 1, - "report": 1, "role": "System Manager", "share": 1, "write": 1 @@ -159,15 +213,14 @@ "create": 1, "delete": 1, "email": 1, - "export": 1, "print": 1, "read": 1, - "report": 1, "role": "Stock Manager", "share": 1, "write": 1 } ], + "row_format": "Dynamic", "sort_field": "modified", "sort_order": "DESC", "states": [], diff --git a/beam/beam/doctype/beam_settings/beam_settings.py 
b/beam/beam/doctype/beam_settings/beam_settings.py index ea7f73cf..feaafd48 100644 --- a/beam/beam/doctype/beam_settings/beam_settings.py +++ b/beam/beam/doctype/beam_settings/beam_settings.py @@ -72,3 +72,20 @@ def get_configuration_hooks(): bm = frappe.get_hooks().get("beam_mobile") components = sorted(list(set(bm.get("components").keys()))) return frappe._dict({"components": components}) + + +@frappe.whitelist() +def get_doctypes_with_item_barcodes() -> list[str]: + """Return all doctypes that have a Table field with options 'Item Barcode'.""" + existing_doctypes = set(frappe.get_all("DocType", pluck="name")) + standard = frappe.get_all( + "DocField", + filters={"fieldtype": "Table", "options": "Item Barcode"}, + pluck="parent", + ) + custom = frappe.get_all( + "Custom Field", + filters={"fieldtype": "Table", "options": "Item Barcode"}, + pluck="dt", + ) + return sorted(existing_doctypes.intersection(standard + custom)) diff --git a/beam/beam/doctype/beam_settings/test_beam_settings.py b/beam/beam/doctype/beam_settings/test_beam_settings.py deleted file mode 100644 index 61700c28..00000000 --- a/beam/beam/doctype/beam_settings/test_beam_settings.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) 2024, AgriTheory and Contributors -# See license.txt - -# import frappe -from frappe.tests.utils import FrappeTestCase - - -class TestBEAMSettings(FrappeTestCase): - pass diff --git a/beam/beam/doctype/handling_unit/handling_unit.py b/beam/beam/doctype/handling_unit/handling_unit.py index 74a81795..eeba0de7 100644 --- a/beam/beam/doctype/handling_unit/handling_unit.py +++ b/beam/beam/doctype/handling_unit/handling_unit.py @@ -23,6 +23,8 @@ def autoname(self): self.handling_unit_name = self.name = str(uuid.uuid4().int >> 64) def validate(self): + if frappe.db.exists("Item Barcode", {"barcode": self.name, "parent": self.name}): + return barcode = frappe.new_doc("Item Barcode") barcode.parenttype = "Handling Unit" barcode.barcode_type = "Code128" diff --git 
a/beam/beam/overrides/company.py b/beam/beam/overrides/company.py new file mode 100644 index 00000000..86fbbb04 --- /dev/null +++ b/beam/beam/overrides/company.py @@ -0,0 +1,11 @@ +# Copyright (c) 2026, AgriTheory and contributors +# For license information, please see license.txt + +import frappe + +from beam.beam.doctype.beam_settings.beam_settings import create_beam_settings + + +def create_company_beam_settings(doc, method=None): + if not frappe.db.exists("BEAM Settings", {"company": doc.name}): + create_beam_settings(doc.name) diff --git a/beam/beam/overrides/network_printer_settings.py b/beam/beam/overrides/network_printer_settings.py new file mode 100644 index 00000000..0579e59e --- /dev/null +++ b/beam/beam/overrides/network_printer_settings.py @@ -0,0 +1,62 @@ +# Copyright (c) 2025, AgriTheory and contributors +# For license information, please see license.txt + +import frappe +from frappe import _ +from frappe.printing.doctype.network_printer_settings.network_printer_settings import ( + NetworkPrinterSettings, +) + + +class BEAMNetworkPrinterSettings(NetworkPrinterSettings): + @frappe.whitelist() + def get_printers_list(self, ip="127.0.0.1", port=631): + printer_list = [] + try: + import cups + except ImportError: + frappe.throw( + _( + """This feature can not be used as dependencies are missing. 
+ Please contact your system manager to enable this by installing pycups!""" + ) + ) + return + try: + cups.setServer(self.server_ip) + cups.setPort(self.port) + conn = cups.Connection() + printers = conn.getPrinters() + for printer_id, printer in printers.items(): + make_model = printer["printer-make-and-model"] + location = printer.get("printer-location", "") + description = f"{make_model}, {location}" if location else make_model + printer_list.append( + { + "value": printer_id, + "label": printer_id, + "description": description, + "location": location, + } + ) + except RuntimeError: + frappe.throw(_("Failed to connect to server")) + except frappe.ValidationError: + frappe.throw(_("Failed to connect to server")) + return printer_list + + def validate(self): + self.push_location_to_cups() + + def push_location_to_cups(self): + if not self.printer_name: + return + try: + import cups + + cups.setServer(self.server_ip) + cups.setPort(self.port) + conn = cups.Connection() + conn.setPrinterLocation(self.printer_name, self.printer_location or "") + except Exception: + pass diff --git a/beam/beam/overrides/stock_entry.py b/beam/beam/overrides/stock_entry.py index 53a13052..24297311 100644 --- a/beam/beam/overrides/stock_entry.py +++ b/beam/beam/overrides/stock_entry.py @@ -20,23 +20,27 @@ def update_stock_ledger(self): self.get_sle_for_source_warehouse(sl_entries, finished_item_row) self.get_sle_for_target_warehouse(sl_entries, finished_item_row) - # Add handling_unit to Stock Ledger Entries + # Ensure handling_unit is set on SLE entries if enabled if settings.enable_handling_units: for sle in sl_entries: - if hasattr(sle, "voucher_detail_no") and sle.voucher_detail_no: - for item in self.get("items"): - if item.name == sle.voucher_detail_no: - # For transfers with both handling_unit and to_handling_unit - if item.handling_unit and item.to_handling_unit: - if sle.get("warehouse") == item.s_warehouse: - # Source warehouse uses original handling_unit - sle.handling_unit = 
item.handling_unit - elif sle.get("warehouse") == item.t_warehouse: - # Target warehouse uses to_handling_unit - sle.handling_unit = item.to_handling_unit - elif item.handling_unit: - sle.handling_unit = item.handling_unit - break + if hasattr(sle, "get") and "voucher_detail_no" in sle: + item_row = next( + (item for item in self.items if item.name == sle.get("voucher_detail_no")), None + ) + if item_row: + # For source warehouse (consumption), use handling_unit + if ( + sle.get("warehouse") == item_row.s_warehouse + and hasattr(item_row, "handling_unit") + and item_row.handling_unit + ): + sle["handling_unit"] = item_row.handling_unit + # For target warehouse (receipt), use to_handling_unit if it exists, otherwise handling_unit + elif sle.get("warehouse") == item_row.t_warehouse: + if hasattr(item_row, "to_handling_unit") and item_row.to_handling_unit: + sle["handling_unit"] = item_row.to_handling_unit + elif hasattr(item_row, "handling_unit") and item_row.handling_unit: + sle["handling_unit"] = item_row.handling_unit if self.docstatus == 2: sl_entries.reverse() @@ -49,7 +53,12 @@ def update_stock_ledger(self): def make_handling_unit_sles(self): hu_sles = [] for d in self.get("items"): - if self.docstatus == 2 and not d.recombine_on_cancel and d.handling_unit and d.to_handling_unit: + # Only process when cancelling AND user wants to keep separate (NOT recombine) + if self.docstatus != 2 or d.recombine_on_cancel or not d.handling_unit: + continue + + if d.handling_unit and d.to_handling_unit: + # Material Transfer types: both HUs on the same row sle = self.get_sl_entries( d, { @@ -72,6 +81,32 @@ def make_handling_unit_sles(self): _sle["handling_unit"] = d.to_handling_unit _sle["is_cancelled"] = 0 hu_sles.append(_sle) + elif d.s_warehouse and not d.t_warehouse: + # Repack/Manufacture source row: re-consume from source HU + sle = self.get_sl_entries( + d, + { + "warehouse": cstr(d.s_warehouse), + "actual_qty": -flt(d.transfer_qty), + "incoming_rate": 
flt(d.valuation_rate), + }, + ) + sle["handling_unit"] = d.handling_unit + sle["is_cancelled"] = 0 + hu_sles.append(sle) + elif d.t_warehouse and not d.s_warehouse: + # Repack/Manufacture target row: re-add to target HU + sle = self.get_sl_entries( + d, + { + "warehouse": cstr(d.t_warehouse), + "actual_qty": flt(d.transfer_qty), + "incoming_rate": flt(d.valuation_rate), + }, + ) + sle["handling_unit"] = d.handling_unit + sle["is_cancelled"] = 0 + hu_sles.append(sle) return hu_sles diff --git a/beam/beam/print_format/labelary_print_preview/labelary_print_preview.json b/beam/beam/print_format/labelary_print_preview/labelary_print_preview.json index 3a015b3a..fba3563e 100644 --- a/beam/beam/print_format/labelary_print_preview/labelary_print_preview.json +++ b/beam/beam/print_format/labelary_print_preview/labelary_print_preview.json @@ -10,7 +10,7 @@ "docstatus": 0, "doctype": "Print Format", "font_size": 14, - "html": "
\n \n
\n", + "html": "
\n \n
\n", "idx": 0, "line_breaks": 0, "margin_bottom": 15.0, diff --git a/beam/beam/print_format/microqr_serial_no/__init__.py b/beam/beam/print_format/microqr_serial_no/__init__.py new file mode 100644 index 00000000..b1279b72 --- /dev/null +++ b/beam/beam/print_format/microqr_serial_no/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2025, AgriTheory and contributors +# For license information, please see license.txt diff --git a/beam/beam/print_format/microqr_serial_no/microqr_serial_no.json b/beam/beam/print_format/microqr_serial_no/microqr_serial_no.json new file mode 100644 index 00000000..ea87ff73 --- /dev/null +++ b/beam/beam/print_format/microqr_serial_no/microqr_serial_no.json @@ -0,0 +1,33 @@ +{ + "absolute_value": 0, + "align_labels_right": 0, + "creation": "2025-08-11 13:33:35.315005", + "css": "", + "custom_format": 1, + "default_print_language": "en-US", + "disabled": 0, + "doc_type": "Serial No", + "docstatus": 0, + "doctype": "Print Format", + "font_size": 14, + "html": "{% set sn = get_serial_no(doc.name) %}\n\n\n
\n
0mm
\n
0mm
\n
0mm
\n
0mm
\n\n
\n
\n \n
\n
\n {{get_qr_code(sn.serial_no)}}\n
\n
\n
{{ sn.item_code or doc.item_code }}
\n
{{ sn.serial_no }}
\n
{{ frappe.utils.format_datetime(sn.posting_datetime) }}
\n
{{ sn.company }}
\n
\n\n
\n
\n\n", + "idx": 0, + "line_breaks": 0, + "margin_bottom": 15.0, + "margin_left": 15.0, + "margin_right": 15.0, + "margin_top": 15.0, + "modified": "2025-08-11 15:14:09.683495", + "modified_by": "Administrator", + "module": "BEAM", + "name": "MicroQR Serial No", + "owner": "Administrator", + "page_number": "Hide", + "pdf_generator": "wkhtmltopdf", + "print_format_builder": 0, + "print_format_builder_beta": 0, + "print_format_type": "Jinja", + "raw_printing": 0, + "show_section_headings": 0, + "standard": "Yes" +} diff --git a/beam/beam/printing.py b/beam/beam/printing.py index fb126ad7..c78ece1b 100644 --- a/beam/beam/printing.py +++ b/beam/beam/printing.py @@ -31,9 +31,14 @@ def print_by_server( ): print_settings = frappe.get_doc("Network Printer Settings", printer_setting) if isinstance(doc, str): - doc = frappe._dict(json.loads(doc)) + _doc = frappe._dict(json.loads(doc)) + doc = frappe.get_doc(_doc.doctype, _doc.name) + doc.update(_doc) if not print_format: print_format = frappe.get_meta(doctype).get("default_print_format") + # Default to "Standard" print format if still empty + if not print_format: + print_format = "Standard" print_format = frappe.get_doc("Print Format", print_format) try: cups.setServer(print_settings.server_ip) @@ -99,8 +104,9 @@ def print_handling_units( doctype=None, name=None, printer_setting=None, print_format=None, doc=None ): if isinstance(doc, str): - doc = frappe._dict(json.loads(doc)) - + _doc = frappe._dict(json.loads(doc)) + doc = frappe.get_doc(_doc.doctype, _doc.name) + doc.update(_doc) for row in doc.get("items"): if not row.get("handling_unit"): continue @@ -145,6 +151,18 @@ def labelary_api(doc, print_format, settings=None): e.globals.update(methods) template = e.from_string(print_format.raw_commands) output = template.render(doc=doc) - url = "http://api.labelary.com/v1/printers/8dpmm/labels/6x4/0/" + + # Extract label dimensions and DPI from settings + # dpmm: dots per millimeter (default 8 = ~203 DPI) + # width: label width 
in inches (default 6) + # height: label height in inches (default 4) + # index: label index for multi-label formats (default 0) + dpmm = settings.get("dpmm", 8) # 8 dpmm ≈ 203 DPI, 12 dpmm ≈ 300 DPI + width = settings.get("width", 6) + height = settings.get("height", 4) + index = settings.get("index", 0) + + url = f"http://api.labelary.com/v1/printers/{dpmm}dpmm/labels/{width}x{height}/{index}/" r = requests.post(url, files={"file": output}) - return base64.b64encode(r.content).decode("ascii") + content = r.content + return base64.b64encode(content).decode("ascii") diff --git a/beam/beam/scan/__init__.py b/beam/beam/scan/__init__.py index b61404da..b322b157 100644 --- a/beam/beam/scan/__init__.py +++ b/beam/beam/scan/__init__.py @@ -9,6 +9,9 @@ import frappe from erpnext.stock.doctype.stock_entry.stock_entry import StockEntry from erpnext.stock.get_item_details import get_item_details, get_valuation_rate +from frappe.query_builder import Case, DocType +from frappe.query_builder.custom import ConstantColumn +from frappe.query_builder.functions import Coalesce @frappe.whitelist() @@ -32,17 +35,56 @@ def scan( def get_barcode_context(barcode: str) -> frappe._dict | None: + # Get BEAM Settings for default company + company = frappe.defaults.get_defaults().get("company") + settings = None + if company and frappe.db.exists("BEAM Settings", {"company": company}): + settings = frappe.get_cached_doc("BEAM Settings", company) + item_barcode = frappe.db.get_value( "Item Barcode", {"barcode": barcode}, ["parent", "parenttype"], as_dict=True ) - if not item_barcode: - return None # mypy asked for this - return frappe._dict( - { - "doc": frappe.get_doc(item_barcode.parenttype, item_barcode.parent), - "barcode": barcode, - } - ) + if item_barcode: + return frappe._dict( + { + "doc": frappe.get_doc(item_barcode.parenttype, item_barcode.parent), + "barcode": barcode, + } + ) + elif not item_barcode and settings and settings.scan_serial_no: + serial_no_table = 
frappe.qb.DocType("Serial No") + bundle_entry_table = frappe.qb.DocType("Serial and Batch Entry") + bundle_table = frappe.qb.DocType("Serial and Batch Bundle") + serial_lookup = ( + ( + frappe.qb.from_(serial_no_table) + .select( + ConstantColumn("Serial No").as_("doctype"), + serial_no_table.name, + ) + .where(serial_no_table.name == barcode) + ) + .union( + frappe.qb.from_(bundle_entry_table) + .join(bundle_table) + .on(bundle_entry_table.parent == bundle_table.name) + .select( + ConstantColumn("Serial and Batch Bundle").as_("doctype"), + bundle_entry_table.parent, + ) + .where(bundle_entry_table.serial_no == barcode) + ) + .limit(1) + .run(as_dict=True) + ) + if serial_lookup: + return frappe._dict( + { + "doc": frappe.get_doc(serial_lookup[0].doctype, serial_lookup[0].name), + "barcode": barcode, + } + ) + return None def get_handling_unit(handling_unit: str, parent_doctype: str | None = None) -> frappe._dict: @@ -52,6 +94,7 @@ def get_handling_unit(handling_unit: str, parent_doctype: str | None = None) -> fields=[ "item_code", "SUM(actual_qty) AS stock_qty", + "company", "handling_unit", "voucher_no", "posting_date", @@ -139,6 +182,12 @@ def get_list_action(barcode_doc: frappe._dict, context: frappe._dict) -> list[di else: target = get_handling_unit(barcode_doc.doc.name) target = target.get("voucher_no") if target else None + elif barcode_doc.doc.doctype == "Serial No": + if context.get("listview") in ["Item", "Putaway Rule"]: + target = barcode_doc.doc.item_code + else: + target = get_serial_no(barcode_doc.doc.name, context.get("listview")) + target = target.get("voucher_no") if target else None if not target: return [] @@ -273,6 +322,121 @@ def get_form_action(barcode_doc: frappe._dict, context: frappe._dict) -> list[di return actions +def get_serial_no(serial_no: str, parent_doctype: str | None = None) -> frappe._dict: + sle = DocType("Stock Ledger Entry") + snb = DocType("Serial and Batch Entry") + snb_bundle = DocType("Serial and Batch Bundle") + 
se_detail = DocType("Stock Entry Detail") + pr_item = DocType("Purchase Receipt Item") + pi_item = DocType("Purchase Invoice Item") + dn_item = DocType("Delivery Note Item") + + main_query = ( + frappe.qb.from_(sle) + .left_join(snb_bundle) + .on(sle.serial_and_batch_bundle == snb_bundle.name) + .left_join(snb) + .on(snb_bundle.name == snb.parent) + .left_join(se_detail) + .on((sle.voucher_type == "Stock Entry") & (sle.voucher_detail_no == se_detail.name)) + .left_join(pr_item) + .on((sle.voucher_type == "Purchase Receipt") & (sle.voucher_detail_no == pr_item.name)) + .left_join(pi_item) + .on((sle.voucher_type == "Purchase Invoice") & (sle.voucher_detail_no == pi_item.name)) + .left_join(dn_item) + .on((sle.voucher_type == "Delivery Note") & (sle.voucher_detail_no == dn_item.name)) + .select( + sle.item_code, + sle.actual_qty.as_("stock_qty"), + sle.company, + sle.voucher_no, + sle.posting_date, + sle.posting_time, + sle.stock_uom, + sle.voucher_type, + sle.voucher_detail_no, + sle.warehouse, + sle.serial_and_batch_bundle, + Coalesce(snb.serial_no, sle.serial_no).as_("serial_no"), + # Item details from whichever child table matches + Coalesce(se_detail.uom, pr_item.uom, pi_item.uom, dn_item.uom).as_("uom"), + Coalesce(se_detail.qty, pr_item.qty, pi_item.qty, dn_item.qty).as_("qty"), + Coalesce( + se_detail.conversion_factor, + pr_item.conversion_factor, + pi_item.conversion_factor, + dn_item.conversion_factor, + ).as_("conversion_factor"), + Coalesce(se_detail.idx, pr_item.idx, pi_item.idx, dn_item.idx).as_("idx"), + Coalesce(se_detail.item_name, pr_item.item_name, pi_item.item_name, dn_item.item_name).as_( + "item_name" + ), + Coalesce(se_detail.name, pr_item.name, pi_item.name, dn_item.name).as_("detail_name"), + # Special field for Purchase Receipt + Case() + .when(sle.voucher_type == "Purchase Receipt", pr_item.stock_qty) + .else_(None) + .as_("stock_qty_field"), + # For Packing Slip case - get delivery note item details + Case() + .when( + (dn_item.docstatus 
== 0) + & ((snb.serial_no == serial_no) | (dn_item.serial_no.like(f"%{serial_no}%"))), + dn_item.name, + ) + .else_(None) + .as_("dn_detail"), + ) + .where( + (sle.is_cancelled == 0) + & ( + (snb.serial_no == serial_no) + | (sle.serial_no.like(f"%{serial_no}%")) # Serial and Batch method # Direct field method + ) + ) + .groupby(sle.voucher_no, sle.voucher_detail_no) + .orderby(sle.posting_date, order=frappe.qb.desc) + .orderby(sle.posting_time, order=frappe.qb.desc) + .limit(1) + ) + + result = main_query.run(as_dict=True) + + if not result: + return + + sle_data = frappe._dict(result[0]) + + if sle_data.stock_qty_field is not None: + sle_data.stock_qty = sle_data.stock_qty_field + + if parent_doctype == "Packing Slip" and sle_data.dn_detail: + sle_data.dn_detail = sle_data.dn_detail + + sle_data.qty = 1.0 + + if sle_data.conversion_factor and sle_data.conversion_factor != 0: + sle_data.stock_qty = sle_data.qty / sle_data.conversion_factor + else: + sle_data.stock_qty = sle_data.qty + + sle_data.posting_datetime = ( + datetime.datetime( + sle_data.posting_date.year, sle_data.posting_date.month, sle_data.posting_date.day + ) + + sle_data.posting_time + ) + + sle_data.user = frappe.session.user + sle_data.pop("posting_date", None) + sle_data.pop("posting_time", None) + sle_data.pop("voucher_detail_no", None) + sle_data.pop("stock_qty_field", None) + sle_data.pop("detail_name", None) + + return sle_data + + listview = { "Handling Unit": { "Delivery Note": [ @@ -424,6 +588,53 @@ def get_form_action(barcode_doc: frappe._dict, context: frappe._dict) -> list[di {"action": "route", "doctype": "Warehouse", "field": "Warehouse", "target": "target"} ], }, + "Serial No": { + "Delivery Note": [ + {"action": "filter", "doctype": "Delivery Note", "field": "name", "target": "target"} + ], + "Item": [{"action": "route", "doctype": "Item", "field": "Item", "target": "target"}], + "Packing Slip": [ + {"action": "filter", "doctype": "Packing Slip", "field": "name", "target": "target"} 
+ ], + "Purchase Invoice": [ + { + "action": "filter", + "doctype": "Purchase Invoice", + "field": "name", + "target": "target", + } + ], + "Purchase Receipt": [ + { + "action": "route", + "doctype": "Purchase Receipt", + "field": "Purchase Receipt", + "target": "target", + } + ], + "Putaway Rule": [ + {"action": "filter", "doctype": "Putaway Rule", "field": "item_code", "target": "target"}, + ], + "Quality Inspection": [ + { + "action": "filter", + "doctype": "Quality Inspection", + "field": "handling_unit", + "target": "target", + }, + ], + "Stock Entry": [ + {"action": "filter", "doctype": "Stock Entry", "field": "name", "target": "target"} + ], + "Stock Reconciliation": [ + { + "action": "filter", + "doctype": "Stock Reconciliation", + "field": "name", + "target": "target", + } + ], + }, } frm = { @@ -792,4 +1003,178 @@ def get_form_action(barcode_doc: frappe._dict, context: frappe._dict) -> list[di }, ], }, + "Serial No": { + "Delivery Note": [ + { + "action": "add_or_associate", + "doctype": "Delivery Note Item", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Delivery Note Item", + "field": "rate", + "target": "target.rate", + "context": "target", + }, + ], + "Item Price": [ + { + "action": "set_item_code_and_handling_unit", + "doctype": "Item Price", + "field": "item_code", + "target": "target.item_code", + "context": "target", + }, + ], + "Packing Slip": [ + { + "action": "add_or_associate", + "doctype": "Packing Slip Item", + "field": "conversion_factor", + "target": "target.conversion_factor", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Packing Slip Item", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Packing Slip Item", + "field": "pulled_quantity", + "target": "target.qty", + "context": "target", + }, + { + "action": 
"add_or_associate", + "doctype": "Packing Slip Item", + "field": "rate", + "target": "target.rate", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Packing Slip Item", + "field": "stock_qty", + "target": "target.stock_qty", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Packing Slip Item", + "field": "warehouse", + "target": "target.warehouse", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Packing Slip Item", + "field": "dn_detail", + "target": "target.dn_detail", + "context": "target", + }, + ], + "Purchase Invoice": [ + { + "action": "add_or_associate", + "doctype": "Purchase Invoice Item", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + ], + "Putaway Rule": [ + { + "action": "set_item_code_and_handling_unit", + "doctype": "Putaway Rule", + "field": "item_code", + "target": "target.item_code", + "context": "target", + }, + ], + "Quality Inspection": [ + { + "action": "set_item_code_and_handling_unit", + "doctype": "Quality Inspection", + "field": "item_code", + "target": "target.item_code", + "context": "target", + }, + { + "action": "set_item_code_and_handling_unit", + "doctype": "Quality Inspection", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + ], + "Stock Entry": [ + { + "action": "add_or_associate", + "doctype": "Stock Entry Detail", + "field": "basic_rate", + "target": "target.valuation_rate", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Stock Entry Detail", + "field": "conversion_factor", + "target": "target.conversion_factor", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Stock Entry Detail", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Stock Entry Detail", + "field": "s_warehouse", + "target": 
"target.warehouse", + "context": "target", + }, + { + "action": "add_or_associate", + "doctype": "Stock Entry Detail", + "field": "transfer_qty", + "target": "target.stock_qty", + "context": "target", + }, + ], + "Stock Reconciliation": [ + { + "action": "add_or_associate", + "doctype": "Stock Reconciliation Item", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + ], + "Warranty Claim": [ + { + "action": "set_item_code_and_handling_unit", + "doctype": "Warranty Claim", + "field": "item_code", + "target": "target.item_code", + "context": "target", + }, + { + "action": "set_item_code_and_handling_unit", + "doctype": "Warranty Claim", + "field": "handling_unit", + "target": "target.handling_unit", + "context": "target", + }, + ], + }, } diff --git a/beam/beam/zpl_layout.py b/beam/beam/zpl_layout.py new file mode 100644 index 00000000..3276f376 --- /dev/null +++ b/beam/beam/zpl_layout.py @@ -0,0 +1,525 @@ +# Copyright (c) 2026, AgriTheory and contributors +# For license information, please see license.txt + +#!/usr/bin/env python3 +""" +ZPL Label Layout Tools - Extract coordinates from PDF labels and generate ZPL templates. + +Usage: + python zpl_layout.py /path/to/label.pdf --dpi 300 --width 6 --height 4 --output ./output/ + python zpl_layout.py /path/to/label.pdf --rotate # Portrait to landscape +""" +import argparse +import json +import sys +from pathlib import Path + +import pdfplumber + + +def analyze_pdf_label( + pdf_path, target_dpi=300, label_width_inches=6, label_height_inches=4, rotate_90=True +): + """ + Extract text blocks with coordinates from PDF and convert to ZPL coordinates. 
+ + Args: + pdf_path: Path to PDF file + target_dpi: Target printer DPI (default 300) + label_width_inches: Label width in inches (landscape) + label_height_inches: Label height in inches (landscape) + rotate_90: If True, rotate portrait PDF to landscape ZPL + + Returns: + Dictionary with text blocks, barcodes, and coordinate mappings + """ + results = { + "label_dimensions": { + "width_dots": label_width_inches * target_dpi, + "height_dots": label_height_inches * target_dpi, + "dpi": target_dpi, + }, + "text_blocks": [], + "barcode_regions": [], + "lines": [], + } + + with pdfplumber.open(pdf_path) as pdf: + page = pdf.pages[0] # First page + + # Get PDF dimensions + pdf_width = page.width + pdf_height = page.height + + print(f"PDF dimensions: {pdf_width} x {pdf_height} points") + print( + f"Target ZPL: {results['label_dimensions']['width_dots']} x {results['label_dimensions']['height_dots']} dots" + ) + print(f"Rotation: {'90° CW (portrait → landscape)' if rotate_90 else 'None'}\n") + + # Extract text with coordinates + words = page.extract_words(x_tolerance=3, y_tolerance=3, keep_blank_chars=False) + + # Group words into text blocks (by proximity) + text_blocks = [] + current_block = [] + last_y = None + y_tolerance = 15 # Points tolerance for same line + + for word in words: + x0, y0, x1, y1 = word["x0"], word["top"], word["x1"], word["bottom"] + text = word["text"] + + # Convert PDF coordinates to ZPL with optional rotation + # PDF: origin bottom-left, Y increases upward + # ZPL: origin top-left, Y increases downward + + if rotate_90: + # Rotate 90° clockwise: portrait PDF (4"x6") → landscape ZPL (6"x4") + # New X = old Y (from top) + # New Y = pdf_width - old X + pdf_y_from_top = pdf_height - y1 # Convert to top-origin + zpl_x = int((pdf_y_from_top / pdf_height) * results["label_dimensions"]["width_dots"]) + zpl_y = int(((pdf_width - x0) / pdf_width) * results["label_dimensions"]["height_dots"]) + else: + # No rotation + zpl_x = int((x0 / pdf_width) * 
results["label_dimensions"]["width_dots"]) + zpl_y = int(((pdf_height - y1) / pdf_height) * results["label_dimensions"]["height_dots"]) + + # Detect potential barcode patterns + is_barcode = False + if text.startswith("(") and ")" in text: + # GS1 application identifier format like (420) + is_barcode = True + elif ( + text.replace(" ", "").replace("-", "").isdigit() + and len(text.replace(" ", "").replace("-", "")) > 10 + ): + # Long numeric string - likely tracking/serial number + is_barcode = True + + block_info = { + "text": text, + "pdf_coords": {"x": x0, "y": pdf_height - y1, "x1": x1, "y1": pdf_height - y0}, + "zpl_coords": {"x": zpl_x, "y": zpl_y}, + "width": int((x1 - x0) / pdf_width * results["label_dimensions"]["width_dots"]), + "height": int((y1 - y0) / pdf_height * results["label_dimensions"]["height_dots"]), + "is_potential_barcode": is_barcode, + } + + if is_barcode: + results["barcode_regions"].append(block_info) + + text_blocks.append(block_info) + + results["text_blocks"] = text_blocks + + # Detect horizontal lines (dividers) + lines = page.lines + for line in lines: + if rotate_90: + # Rotate the line coordinates + is_horizontal = abs(line["x0"] - line["x1"]) < 2 # Vertical in PDF becomes horizontal in ZPL + if is_horizontal: + pdf_y_from_top = pdf_height - line["y0"] + zpl_y = int( + ((pdf_width - line["x0"]) / pdf_width) * results["label_dimensions"]["height_dots"] + ) + zpl_x0 = int((pdf_y_from_top / pdf_height) * results["label_dimensions"]["width_dots"]) + zpl_x1 = int( + ((pdf_height - line["y1"]) / pdf_height) * results["label_dimensions"]["width_dots"] + ) + results["lines"].append( + { + "type": "horizontal", + "zpl_coords": {"x0": min(zpl_x0, zpl_x1), "y": zpl_y, "x1": max(zpl_x0, zpl_x1)}, + "length": abs(zpl_x1 - zpl_x0), + } + ) + else: + if abs(line["y0"] - line["y1"]) < 2: # Horizontal line + zpl_y = int( + ((pdf_height - line["y0"]) / pdf_height) * results["label_dimensions"]["height_dots"] + ) + zpl_x0 = int((line["x0"] / 
pdf_width) * results["label_dimensions"]["width_dots"]) + zpl_x1 = int((line["x1"] / pdf_width) * results["label_dimensions"]["width_dots"]) + results["lines"].append( + { + "type": "horizontal", + "zpl_coords": {"x0": zpl_x0, "y": zpl_y, "x1": zpl_x1}, + "length": zpl_x1 - zpl_x0, + } + ) + + return results + + +def smart_group_text(text_blocks, width, height): + """ + Intelligently group text blocks into logical sections based on layout. + """ + sections = {} + + # Sort blocks by Y position (top to bottom) + sorted_blocks = sorted(text_blocks, key=lambda b: (b["zpl_coords"]["y"], b["zpl_coords"]["x"])) + + # Define regions (for 6"x4" = 1800x1200) + regions = { + "top_bar": (0, 0, width, 150), # Top header bar + "main_addresses": (0, 150, width, 500), # Address blocks + "divider_1": (0, 500, width, 550), + "shipping_info": (0, 550, width, 800), # Postal/carrier info + "divider_2": (0, 800, width, 850), + "product_details": (0, 850, width, 1050), # PO/SKU/Description + "bottom_barcodes": (0, 1050, width, height), # Bottom barcode area + } + + for region_name, (x0, y0, x1, y1) in regions.items(): + sections[region_name] = [] + for block in sorted_blocks: + bx = block["zpl_coords"]["x"] + by = block["zpl_coords"]["y"] + if x0 <= bx < x1 and y0 <= by < y1: + sections[region_name].append(block) + + return sections + + +def generate_layout_map(sections, width, height): + """ + Generate a visual ASCII layout map. 
+ """ + # Create a grid (scaled down) + grid_width = 90 # chars + grid_height = 24 # lines + scale_x = width / grid_width + scale_y = height / grid_height + + grid = [[" " for _ in range(grid_width)] for _ in range(grid_height)] + + # Draw borders + for x in range(grid_width): + grid[0][x] = "-" + grid[grid_height - 1][x] = "-" + for y in range(grid_height): + grid[y][0] = "|" + grid[y][grid_width - 1] = "|" + + # Place text blocks + for section_name, blocks in sections.items(): + for block in blocks: + x = int(block["zpl_coords"]["x"] / scale_x) + y = int(block["zpl_coords"]["y"] / scale_y) + if 1 < x < grid_width - 1 and 1 < y < grid_height - 1: + if block["is_potential_barcode"]: + grid[y][x] = "█" + else: + grid[y][x] = "·" + + return "\n".join("".join(row) for row in grid) + + +def print_analysis(analysis, sections): + """Print human-readable analysis.""" + print("=" * 80) + print("LABEL ANALYSIS - ZPL COORDINATE MAPPING") + print("=" * 80) + print( + f"\nLabel dimensions: {analysis['label_dimensions']['width_dots']} x {analysis['label_dimensions']['height_dots']} dots @ {analysis['label_dimensions']['dpi']} DPI" + ) + + print("\n" + "-" * 80) + print("SECTIONS") + print("-" * 80) + + for section_name, blocks in sections.items(): + if blocks: + print(f"\n### {section_name.upper().replace('_', ' ')}") + for block in blocks: + print(f" [{block['zpl_coords']['x']:4d}, {block['zpl_coords']['y']:4d}] \"{block['text']}\"") + + print("\n" + "-" * 80) + print("HORIZONTAL LINES (Dividers)") + print("-" * 80) + for line in analysis["lines"]: + print( + f" Y={line['zpl_coords']['y']:4d}, X=[{line['zpl_coords']['x0']:4d} to {line['zpl_coords']['x1']:4d}], Length={line['length']} dots" + ) + + print("\n" + "-" * 80) + print("BARCODE REGIONS") + print("-" * 80) + for barcode in analysis["barcode_regions"]: + print( + f" [{barcode['zpl_coords']['x']:4d}, {barcode['zpl_coords']['y']:4d}] \"{barcode['text']}\" (size: {barcode['width']}x{barcode['height']} dots)" + ) + + +def 
generate_zpl_template(analysis, sections): + """Generate a production-ready ZPL template with proper structure.""" + lines = [] + + # Header + dpi = analysis["label_dimensions"]["dpi"] + width_dots = analysis["label_dimensions"]["width_dots"] + height_dots = analysis["label_dimensions"]["height_dots"] + width_inches = width_dots / dpi + height_inches = height_dots / dpi + lines.append("{# Shipping Label - " + f'{width_inches}x{height_inches}" @ {dpi} DPI #}}') + lines.append( + "{% set label = zebra_zpl_label(width=" + + str(width_dots) + + ", length=" + + str(height_dots) + + ", dpi=" + + str(dpi) + + ") -%}" + ) + lines.append("") + lines.append("^XA {# Start Format #}") + lines.append(f"^PW{width_dots} " + "{# Print Width: " + str(width_dots) + " dots #}") + lines.append(f"^LL{height_dots} " + "{# Label Length: " + str(height_dots) + " dots #}") + lines.append("") + + # Top section - may contain store number or routing info + top_blocks = sections.get("top_bar", []) + if top_blocks: + lines.append("{# === TOP BAR SECTION === #}") + for block in sorted(top_blocks, key=lambda b: b["zpl_coords"]["x"]): + x, y = block["zpl_coords"]["x"], block["zpl_coords"]["y"] + text = block["text"] + lines.append(f"^FO{x},{y}^A0N,40,40^FD{text}^FS") + lines.append("") + + # Main address section + addr_blocks = sections.get("main_addresses", []) + if addr_blocks: + lines.append("{# === ADDRESS SECTION === #}") + lines.append("{# Ship From (Left Side) #}") + lines.append("^FO50,150^A0N,35,35^FDShip From:^FS") + lines.append("^FO50,200^A0N,28,28^FB700,5,0,L,0^FD{{ doc.ship_from_name }}^FS") + lines.append("^FO50,250^A0N,28,28^FB700,5,0,L,0^FD{{ doc.ship_from_address }}^FS") + lines.append("") + lines.append("{# Ship To (Right Side) #}") + mid_x = analysis["label_dimensions"]["width_dots"] / 2 + lines.append("^FO950,150^A0N,35,35^FDShip To:^FS") + lines.append("^FO950,200^A0N,28,28^FB800,5,0,L,0^FD{{ doc.ship_to_name }}^FS") + lines.append("^FO950,250^A0N,28,28^FB800,5,0,L,0^FD{{ 
doc.ship_to_address }}^FS") + lines.append("") + + # Horizontal divider + lines.append("{# === DIVIDER LINE === #}") + lines.append("^FO50,500^GB1700,3,3^FS") + lines.append("") + + # Shipping info section (postal code barcode + carrier info) + ship_blocks = sections.get("shipping_info", []) + if ship_blocks: + lines.append("{# === SHIPPING INFORMATION === #}") + lines.append("{# Postal Code Barcode (Left) #}") + lines.append("^FO50,520^A0N,25,25^FD(420) Ship to Postal Code^FS") + lines.append("^FO100,560^BY3^BCN,100,Y,N^FD(420){{ doc.ship_to_zip }}^FS") + lines.append("^FO120,680^A0N,30,30^FD(420) {{ doc.ship_to_zip }}^FS") + lines.append("") + lines.append("{# Carrier Information (Right) #}") + lines.append("^FO950,520^A0N,28,28^FDCarrier: {{ doc.carrier }}^FS") + lines.append("^FO950,560^A0N,28,28^FDPRO#: {{ doc.tracking_number }}^FS") + lines.append("^FO950,600^A0N,28,28^FDB/L#: {{ doc.bill_of_lading }}^FS") + lines.append( + "^FO950,640^A0N,28,28^FDNumber of Cartons: {{ doc.carton_number }} of {{ doc.total_cartons }}^FS" + ) + lines.append("") + + # Second divider + lines.append("{# === DIVIDER LINE === #}") + lines.append("^FO50,800^GB1700,3,3^FS") + lines.append("") + + # Product details section + prod_blocks = sections.get("product_details", []) + if prod_blocks: + lines.append("{# === PRODUCT DETAILS === #}") + lines.append("{# Left Column #}") + lines.append("^FO50,820^A0N,28,28^FDPO #: {{ doc.po_number }}^FS") + lines.append("^FO50,860^A0N,28,28^FDVendor Part #: {{ doc.vendor_part_number }}^FS") + lines.append("^FO50,900^A0N,28,28^FDUPC #: {{ doc.upc }}^FS") + lines.append("^FO50,940^A0N,28,28^FDCarton Qty: {{ doc.carton_qty }}^FS") + lines.append("") + lines.append("{# Right Column #}") + lines.append("^FO950,820^A0N,28,28^FDSKU #: {{ doc.sku }}^FS") + lines.append("^FO950,860^A0N,28,28^FDSize: {{ doc.size }}^FS") + lines.append("^FO950,900^A0N,28,28^FDColor: {{ doc.color }}^FS") + lines.append("^FO950,940^A0N,28,28^FDDescription: {{ doc.description 
}}^FS") + lines.append("") + + # Bottom barcode section (SSCC-18) + barcode_blocks = sections.get("bottom_barcodes", []) + if barcode_blocks: + lines.append("{# === BOTTOM SSCC BARCODE === #}") + lines.append("^FO200,1050^A0N,25,25^FDSSCC^FS") + lines.append("^FO150,1090^BY3^BCN,100,Y,N^FD{{ doc.sscc_barcode }}^FS") + lines.append("") + + # End format + lines.append("^XZ {# End Format #}") + + return "\n".join(lines) + + +def process_label(pdf_path, output_dir=None, dpi=300, width=6, height=4, rotate=True): + """ + Process a PDF label and generate ZPL template. + + Args: + pdf_path: Path to PDF file + output_dir: Directory to save outputs (default: creates 'output' next to PDF) + dpi: Target printer DPI + width: Label width in inches + height: Label height in inches + rotate: Whether to rotate 90 degrees + + Returns: + Dictionary with analysis results + """ + pdf_path = Path(pdf_path) + + if not pdf_path.exists(): + raise FileNotFoundError(f"PDF not found: {pdf_path}") + + # Determine output directory + if output_dir is None: + output_dir = pdf_path.parent / "output" + else: + output_dir = Path(output_dir) + + output_dir.mkdir(parents=True, exist_ok=True) + + print(f"\n{'='*80}") + print(f"Processing: {pdf_path.name}") + print(f"{'='*80}\n") + + # Analyze PDF + analysis = analyze_pdf_label( + str(pdf_path), + target_dpi=dpi, + label_width_inches=width, + label_height_inches=height, + rotate_90=rotate, + ) + + # Smart grouping + sections = smart_group_text( + analysis["text_blocks"], + analysis["label_dimensions"]["width_dots"], + analysis["label_dimensions"]["height_dots"], + ) + + # Print layout map + print("\nVISUAL LAYOUT MAP") + print("-" * 80) + layout_map = generate_layout_map( + sections, analysis["label_dimensions"]["width_dots"], analysis["label_dimensions"]["height_dots"] + ) + print(layout_map) + print() + + # Print analysis + print_analysis(analysis, sections) + + print("\n" + "=" * 80) + print("PRODUCTION-READY ZPL TEMPLATE") + print("=" * 80) + 
template = generate_zpl_template(analysis, sections) + print(template) + + # Save outputs + base_name = pdf_path.stem.lower().replace(" ", "_") + + # Save template + template_path = output_dir / f"{base_name}.zpl" + with open(template_path, "w") as f: + f.write(template) + print(f"\n✓ ZPL Template: {template_path}") + + # Save layout map + layout_path = output_dir / f"{base_name}_layout_map.txt" + with open(layout_path, "w") as f: + f.write(layout_map) + print(f"✓ Layout Map: {layout_path}") + + # Save detailed analysis + analysis_path = output_dir / f"{base_name}_analysis.json" + with open(analysis_path, "w") as f: + json.dump( + { + "label_dimensions": analysis["label_dimensions"], + "sections": { + k: [ + {"text": b["text"], "coords": b["zpl_coords"], "is_barcode": b["is_potential_barcode"]} + for b in v + ] + for k, v in sections.items() + if v + }, + "lines": analysis["lines"], + }, + f, + indent=2, + ) + print(f"✓ Analysis JSON: {analysis_path}") + + return analysis + + +def main(): + parser = argparse.ArgumentParser( + description="Extract coordinates from PDF labels and generate ZPL templates", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + # Basic usage (assumes portrait PDF to landscape 6x4 @ 300 DPI) + python zpl_layout.py /path/to/label.pdf + + # Specify output directory + python zpl_layout.py /path/to/label.pdf --output ./my_output/ + + # Custom dimensions (no rotation) + python zpl_layout.py /path/to/label.pdf --width 4 --height 6 --dpi 203 --no-rotate + + # Process multiple PDFs + for pdf in label_spec/*/label.pdf; do + python zpl_layout.py "$pdf" + done + """, + ) + + parser.add_argument("pdf", help="Path to PDF label file") + parser.add_argument("--output", "-o", help="Output directory (default: ./output/ next to PDF)") + parser.add_argument("--dpi", type=int, default=300, help="Target printer DPI (default: 300)") + parser.add_argument("--width", type=float, default=6, help="Label width in inches (default: 6)") + 
parser.add_argument("--height", type=float, default=4, help="Label height in inches (default: 4)") + parser.add_argument( + "--no-rotate", action="store_true", help="Do not rotate portrait to landscape" + ) + + args = parser.parse_args() + + try: + process_label( + args.pdf, + output_dir=args.output, + dpi=args.dpi, + width=args.width, + height=args.height, + rotate=not args.no_rotate, + ) + print(f"\n{'='*80}") + print("Processing complete!") + print(f"{'='*80}\n") + except Exception as e: + print(f"\nError: {e}\n", file=sys.stderr) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/beam/customize.py b/beam/customize.py deleted file mode 100644 index 45b0a066..00000000 --- a/beam/customize.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2025, AgriTheory and contributors -# For license information, please see license.txt - -import json -from pathlib import Path - -import frappe - - -def load_customizations(): - customizations_directory = Path().cwd().parent / "apps" / "beam" / "beam" / "beam" / "custom" - files = list(customizations_directory.glob("**/*.json")) - for file in files: - customizations = json.loads(Path(file).read_text()) - for field in customizations.get("custom_fields"): - if field.get("module") != "BEAM": - continue - existing_field = frappe.get_value("Custom Field", field.get("name")) - custom_field = ( - frappe.get_doc("Custom Field", field.get("name")) - if existing_field - else frappe.new_doc("Custom Field") - ) - field.pop("modified") - {custom_field.set(key, value) for key, value in field.items()} - custom_field.flags.ignore_permissions = True - custom_field.flags.ignore_version = True - custom_field.save() - for prop in customizations.get("property_setters"): - if prop.get("module") != "BEAM": - continue - property_setter = frappe.get_doc( - { - "name": prop.get("name"), - "doctype": "Property Setter", - "doctype_or_field": prop.get("doctype_or_field"), - "doc_type": prop.get("doc_type"), - "field_name": 
prop.get("field_name"), - "property": prop.get("property"), - "value": prop.get("value"), - "property_type": prop.get("property_type"), - } - ) - property_setter.flags.ignore_permissions = True - property_setter.insert() diff --git a/beam/docs/assets/beam_settings.png b/beam/docs/assets/beam_settings.png index a8371ed4..f0716ed9 100644 Binary files a/beam/docs/assets/beam_settings.png and b/beam/docs/assets/beam_settings.png differ diff --git a/beam/docs/demand.md b/beam/docs/demand.md index 90c5013a..b79a6bca 100644 --- a/beam/docs/demand.md +++ b/beam/docs/demand.md @@ -3,6 +3,11 @@ For license information, please see license.txt--> # Demand +
+ Rohan Bansal, Myuddin Khatri, Tyler Matteson, and ViralKansodiya-Fosserp 2024-09-02 +
+ + This feature computes what Items are needed and where they are available. ### Demand Map diff --git a/beam/docs/form.md b/beam/docs/form.md index b4e6788f..d6062ff6 100644 --- a/beam/docs/form.md +++ b/beam/docs/form.md @@ -3,6 +3,11 @@ For license information, please see license.txt--> # Form +
+ Rohan Bansal, Heather Kusmierz, and Tyler Matteson 2025-05-28 +
+ + The result of scanning a barcode in the form depends on several factors: - Is the barcode recognized? @@ -14,6 +19,6 @@ For example, when an Item is scanned while viewing a Delivery Note record, it wi |-----------------|-----------------------|--------|--------| |Item|Delivery Note|add_or_increment|item_code| -Beam uses a [decision matrix](./matrix.md) to decide what action to take based on what kind of doctype has been scanned. +BEAM uses a [decision matrix](./matrix.md) to decide what action to take based on what kind of doctype has been scanned. Custom actions and client side functions can be added by using [hooks](./hooks.md). diff --git a/beam/docs/handling_unit.md b/beam/docs/handling_unit.md index eed9292f..8d572804 100644 --- a/beam/docs/handling_unit.md +++ b/beam/docs/handling_unit.md @@ -3,9 +3,14 @@ For license information, please see license.txt--> # Handling Unit +
+ Rohan Bansal, github-actions, Heather Kusmierz, Tyler Matteson, and Francisco Roldán 2025-05-28 +
+ + A Handling Unit is an abstraction for tracking quantities of items that are moved or stored together. It does not replace Batch or Serial numbers, the manufacture of an Item, or the functionality of the Product Bundle, but can supplement these as a way of conveniently grabbing information that would otherwise require a lot of keystrokes to enter. -By assigning a unique ID to the Handling Unit, it is possible to capture via scanner the item, net quantity, unit of measure and timestamp of the previous transaction, and then act upon that information in context, according to the [decision matrix](./matrix.md). Beam adds a new doctype, Handling Unit, to implement this functionality in ERPNext. +By assigning a unique ID to the Handling Unit, it is possible to capture via scanner the item, net quantity, unit of measure and timestamp of the previous transaction, and then act upon that information in context, according to the [decision matrix](./matrix.md). BEAM adds a new doctype, Handling Unit, to implement this functionality in ERPNext. ![Screen shot of the Handling Unit doctype listview. The list shows several new Handling Units that were created for items received via a Purchase Receipt.](./assets/handling_unit_list.png) @@ -76,10 +81,26 @@ When material is transferred from one warehouse to another, it will generate a n | Cocoplum | Work In Progress | 456 | 20 Ea | -When cancelling a Stock Entry, the user will be given an option to re-combine or let handling units remain tracked separately. +#### Cancelling Material Transfer Entries + +When cancelling a Material Transfer Stock Entry (including Send to Subcontractor and Material Transfer for Manufacture), a dialog appears asking whether to recombine handling units or keep them tracked separately. ![Screen shot of the recombine dialog](./assets/recombine.png) +The dialog shows each source handling unit along with its corresponding target handling unit that was created during the transfer. 
By default, all rows are pre-selected for recombination (the recommended action). + +**Recombine (Default):** When rows are selected and "Cancel and Recombine" is clicked: +- The source and target handling units are merged back together +- The original handling unit retains its full quantity as if the transfer never happened +- The target handling unit is removed from inventory +- This is the typical choice when correcting errors or undoing temporary transfers + +**Keep Separate:** When rows are unchecked before clicking "Cancel and Recombine": +- Both handling units remain in the system with their respective quantities +- Stock ledger entries are created to restore the quantities in both warehouses +- The handling units continue to be tracked independently +- Useful when you want to maintain the split for future reference or traceability + ### Repack and Manufacture In the case of a Repack, Material Issue or Material Consumption for Manufacture, a new Handling Unit is generated for the new quantities. @@ -98,6 +119,22 @@ In a case where less than the total quantity associated with a Handling Unit is | Cocoplum Puree | Work In Progress | 012 | 1 liter | | Cocoplum | Scrap | | 1 Ea | +#### Cancelling Repack and Manufacture Entries + +Similar to Material Transfer entries, when cancelling a Repack or Manufacture Stock Entry, a dialog appears to choose the recombine behavior. The dialog shows each consumed (source) handling unit paired with its corresponding produced (target) handling unit. All rows are pre-selected for recombination by default. 
+ +**Recombine (Default):** When rows are selected: +- The consumed handling unit is restored to its original quantity +- The produced handling unit is removed from inventory +- The transformation is completely reversed +- Best for correcting data entry errors or voiding incorrect manufacturing entries + +**Keep Separate:** When rows are unchecked: +- The consumed handling unit receives its quantity back +- The produced handling unit also retains its quantity +- Both handling units coexist in inventory +- Useful for maintaining audit trails when a production run needs to be reversed but you want to preserve the separate handling unit records for compliance or tracking purposes + #### BOM Scrap Item In a Manufacturing or Repack Stock Entry, scrap items can be toggled to create a Handling Unit corresponding with their scrap quantity. This can be changed after a BOM is submitted. diff --git a/beam/docs/hooks.md b/beam/docs/hooks.md index f6cedbf4..66a2963e 100644 --- a/beam/docs/hooks.md +++ b/beam/docs/hooks.md @@ -1,9 +1,14 @@ -# Extending Beam With Custom Hooks +# Extending BEAM With Custom Hooks -Beam can be extended by adding configurations to your application's `hooks.py`. +
+ Rohan Bansal, Heather Kusmierz, and Tyler Matteson 2025-05-28 +
+ + +BEAM can be extended by adding configurations to your application's `hooks.py`. To make scanning available on a custom doctype, add a table field for "Item Barcode" directly in the doctype or via customize form. Then add a key that is a peer with "Item" in the example below. diff --git a/beam/docs/hu_traceability_report.md b/beam/docs/hu_traceability_report.md index 2752d6e5..12080b4e 100644 --- a/beam/docs/hu_traceability_report.md +++ b/beam/docs/hu_traceability_report.md @@ -3,6 +3,11 @@ For license information, please see license.txt--> # Handling Unit Traceability Report +
+ Rohan Bansal, Heather Kusmierz, and Tyler Matteson 2025-02-14 +
+ + The Handling Unit Traceability report provides a simple interface to track a Handling Unit over its life cycle through your company's processes. Filters for the Handling Unit ID, Delivery Note name, and Sales Invoice name allow for fine-tuning of the report's results. ![Screen shot of the Handling Unit Traceability report's filter fields, including Handling Unit, Delivery Note, and Sales Invoice](./assets/hu_trace_filters.png) diff --git a/beam/docs/index.md b/beam/docs/index.md index c2706a5d..c2a7d6db 100644 --- a/beam/docs/index.md +++ b/beam/docs/index.md @@ -1,13 +1,17 @@ -# Beam +# BEAM -Beam is a general purpose 2D barcode scanning application for ERPNext. +
+ Rohan Bansal, Heather Kusmierz, Tyler Matteson, and Francisco Roldán 2025-05-28 +
+ +BEAM is a general purpose barcode scanning application for ERPNext. ## What does this application do? -Beam allows a user to scan a 2D barcode from either a listview or a form view, then helps enter data that would otherwise require numerous keystrokes. Unlike ERPNext's built-in barcode scanning, Beam expects the user to have a hardware barcode scanner connected to their device. +BEAM allows a user to scan a 2D or QR barcode from either a listview or a form view, then helps enter data that would otherwise require numerous keystrokes. Unlike ERPNext's built-in barcode scanning, BEAM expects the user to have a hardware barcode scanner connected to their device. For example, if the user scans a barcode associated with an Item in the Item listview, it will take them to that item's record. @@ -23,9 +27,9 @@ If the user scans an Item in a Delivery Note, it will populate everything it kno Read more about [how scanning in form views works](./form.md). -## Beam Settings +## BEAM Settings -Beam's version 15 introduced a new Beam Settings document to allow users to opt in or out of features in the app. Settings are unique on a per-company basis and are automatically generated (with default options) during certain related transactions if a Beam Settings document doesn't already exist for the company. Related transactions include submission of a Purchase Receipt, Purchase Invoice, or Stock Entry. +Version 15 introduced a new BEAM Settings document to allow users to opt in or out of features in the app. Settings are unique on a per-company basis and are automatically generated (with default options) during certain related transactions if a BEAM Settings document doesn't already exist for the company. Related transactions include submission of a Purchase Receipt, Purchase Invoice, or Stock Entry. 
![Screen shot of the Beam Settings document for the fictitious Ambrosia Pie Company with Barcode Font size of 12, Enable Handling Units checked, Ignore Drop Shipped Items in Demand unchecked, and fields for Receiving Workstation and Shipping Workstation.](./assets/beam_settings.png) @@ -36,11 +40,29 @@ Settings options include: - **Enable Handling Units:** (default checked) enables the generation of Handling Units (see What is a Handling Unit section for more information) - **Ignore Drop Shipped Items in Demand:** (default unchecked) if checked, calculated demand from Sales Orders will ignore any items marked to be shipped by the supplier (drop shipped) +### QR Code Settings + +- **QR Scale:** (default 8) the module size in pixels used when generating QR code images — larger values produce a bigger image +- **QR Border:** (default 4) the quiet zone border size in modules surrounding the QR code +- **QR Error Correct:** (default M) the error correction level encoded into QR codes; options are L (7%), M (15%), Q (25%), and H (30%) — higher levels allow the code to remain scannable even if partially damaged, at the cost of a denser image + +### Barcode Generation + +The Barcode Generation section controls which document types receive an automatically generated Code128 barcode when saved. Any document type that has a Barcodes table (using the Item Barcode child doctype) is listed here. Checked items have auto-generation **enabled**; unchecked items are shown with a strikethrough and will not have barcodes generated on save. + +By default, **Item** and **Warehouse** are enabled. If a Code128 barcode already exists on a document, a new one will never be generated regardless of this setting. If you customize another doctype by adding an Item Barcode table, automatic generation can be configured here but still requires a `doc_event` hook to trigger, which can be configured in your app's `hooks.py` or in a Server Script. 
+```python +"Asset": { + "validate": [ + "beam.beam.barcodes.create_beam_barcode", + ] +}, +``` ## What is a Handling Unit? A Handling Unit is the combination of a container, any packaging material, and the items within or on it. This could be a pallet of raw materials used in a manufacturing process, a crate containing several other Handling Units, or a delivery vehicle transporting the crates and pallets. -Handling Units have unique, scannable identification numbers that are used in any stock transaction involving the items contained within the unit. The ID allows the user to reference everything about the stock transaction, saved from previous transactions. It also enables you to track the Handling Unit throughout its life cycle. The Beam application includes a [Handling Unit Traceability report](./hu_traceability_report.md) to summarize the transactions, related documents, quantities, and warehouses that involved a given Handling Unit. +Handling Units have unique, scannable identification numbers that are used in any stock transaction involving the items contained within the unit. The ID allows the user to reference everything about the stock transaction, saved from previous transactions. It also enables you to track the Handling Unit throughout its life cycle. The BEAM application includes a [Handling Unit Traceability report](./hu_traceability_report.md) to summarize the transactions, related documents, quantities, and warehouses that involved a given Handling Unit. A Handling Unit is generated when materials are received or created in the manufacturing process. @@ -48,7 +70,7 @@ Read more [about Handling Units here](./handling_unit.md). ## Installation and Customization -Beam comes packed with features, but can be extended with custom hooks both on the server side and in the client as needed. 
See the following pages for detailed instructions on installing and customizing the application: +BEAM comes packed with features, but can be extended with custom hooks both on the server side and in the client as needed. See the following pages for detailed instructions on installing and customizing the application: - [Installation](https://github.com/agritheory/beam) - [Customization](./hooks.md) @@ -61,7 +83,7 @@ Warehouses may also have unique barcodes associated with them. The user can navi ## Print Server Integration -Beam offers the ability to print to raw input printers like Zebra printers directly from the browser. Also included are several debugging and example print formats. For more details about configuring this, see the [print server section](./print_server.md). +BEAM offers the ability to print to raw input printers like Zebra printers directly from the browser. Also included are several debugging and example print formats. For more details about configuring this, see the [print server section](./print_server.md). ### Zebra Printing diff --git a/beam/docs/listview.md b/beam/docs/listview.md index 2b47c8e4..1dc92136 100644 --- a/beam/docs/listview.md +++ b/beam/docs/listview.md @@ -3,6 +3,11 @@ For license information, please see license.txt--> # Listview +
+ Rohan Bansal, Heather Kusmierz, and Tyler Matteson 2025-05-28 +
+ + The result of scanning a barcode in the listview depends on several factors: - Is the barcode recognized? @@ -22,6 +27,6 @@ Another example: If an Item is scanned while viewing the Purchase Receipt list, |Item|Purchase Receipt|filter|item_code| -Beam uses a [decision matrix](./matrix.md) to decide what action to take based on what kind of doctype has been scanned. +BEAM uses a [decision matrix](./matrix.md) to decide what action to take based on what kind of doctype has been scanned. Custom actions and client side functions can be added by using [hooks](./hooks.md) diff --git a/beam/docs/matrix.md b/beam/docs/matrix.md index 3e2bf999..5fe860e2 100644 --- a/beam/docs/matrix.md +++ b/beam/docs/matrix.md @@ -2,6 +2,11 @@ For license information, please see license.txt--> # Listview Actions + +
+ Rohan Bansal and Tyler Matteson 2025-05-28 +
+ | Scanned Doctype | Listview | Action | Target | |-----------------|-----------------------|--------|--------| |Handling Unit|Delivery Note|route|Delivery Note| diff --git a/beam/docs/print_server.md b/beam/docs/print_server.md index 28bf6fed..542bae0c 100644 --- a/beam/docs/print_server.md +++ b/beam/docs/print_server.md @@ -3,6 +3,11 @@ For license information, please see license.txt--> # Print Server +
+ Rohan Bansal, Heather Kusmierz, and Tyler Matteson 2025-02-14 +
+ + There are several steps to get a print server connected in ERPNext. 1. First, the `pycups` dependency needs to be installed on the system, which in turn depends on the CUPS project's `libcups` library. See the following links for installation instructions: @@ -15,6 +20,8 @@ There are several steps to get a print server connected in ERPNext. ![Screen shot of the Network Printer Settings document fields, including Name, Printer Name, Server IP, and Port.](./assets/network_printer_settings.png) +The **Printer Name** field is an autocomplete that queries the configured CUPS server and displays available printers by their CUPS identifier, with the make/model and location shown as secondary text. Selecting a printer automatically fills in the **Printer Location** field from CUPS. The location can be edited freely — saving the record pushes the updated value back to CUPS, keeping the two in sync. The **Printer Type** field (`General Purpose` or `Label / RAW`) can be used to distinguish IPP or PDF printers from ZPL/raw label printers. + --- A convenient Print Handling Unit button on relevant doctypes enables the user to print new Handling Unit labels directly from the ERPNext user interface. diff --git a/beam/docs/testing.md b/beam/docs/testing.md index 04d877a2..9bf47c80 100644 --- a/beam/docs/testing.md +++ b/beam/docs/testing.md @@ -3,6 +3,11 @@ For license information, please see license.txt--> # Testing +
+ Rohan Bansal, Heather Kusmierz, and Tyler Matteson 2025-02-14 +
+ + ## Simulating a Scanner Open the browser console. This assumes a barcode of `'9968934975826708157'` which must be sent as a string. diff --git a/beam/docs/zebra_printing.md b/beam/docs/zebra_printing.md index 9334a993..0bb7d4e9 100644 --- a/beam/docs/zebra_printing.md +++ b/beam/docs/zebra_printing.md @@ -3,22 +3,27 @@ For license information, please see license.txt--> # Zebra Printing +
+ Rohan Bansal and Tyler Matteson 2025-02-14 +
+ + To create a Zebra print format, you need the following documents: - A ZPL Print Format made against Doctype that may contain barcodes (Item, Warehouse, Handling Units, etc.) that uses the available Jinja utility functions to generate ZPL code. - A document Print Format that uses the free Labelary API to convert the above ZPL code and generate a preview of the print output for the linked document. ### ZPL Code Generation -Currently, only three types of printable ZPL data can be generated with utilities within Beam: +Currently, only three types of printable ZPL data can be generated with utilities within BEAM: - `Text` - `Barcode` - `Label` -Beam uses the [py-zebra-zpl](https://github.com/mtking2/py-zebra-zpl) library to generate the above types, as it provides a basic interface to create ZPL code using Python objects. Please refer to the library's documentation for more information on how to use it. +BEAM uses the [py-zebra-zpl](https://github.com/mtking2/py-zebra-zpl) library to generate the above types, as it provides a basic interface to create ZPL code using Python objects. Please refer to the library's documentation for more information on how to use it. **Note:** Additional ZPL elements (like graphic fields) and commands (text mirroring, character encoding, etc.) can be developed separately and added as text directly to the ZPL Print Format. For more information, visit the [official documentation page](https://supportcommunity.zebra.com/s/article/ZPL-Command-Information-and-DetailsV2?language=en_US) or the [Labelary ZPL Programming Guide](https://labelary.com/zpl.html). -In addition, Beam exposes the following Jinja functions to be used within a Print Format: +In addition, BEAM exposes the following Jinja functions to be used within a Print Format: --- @@ -135,21 +140,34 @@ Additional arguments can be passed to the function to customize the text. Please #### `labelary_api` -Generate an encoded Zebra printing label via the free Labelary API. 
It takes the following arguments: +Generate an encoded Zebra printing label preview via the free Labelary API. Converts ZPL code to a PNG image for preview purposes. It takes the following arguments: - `doc`: The document to be printed. Required. - `print_format`: The ZPL Print Format to be used for generating the label. Required. - `settings`: Additional settings to be passed to the Labelary API. Allows setting up the following parameters: - - `dpmm`: The desired print density, in dots per millimeter. Defaults to 8. + - `dpmm`: The desired print density, in dots per millimeter. Defaults to 8 (≈203 DPI). Use 12 for 300 DPI printers. - `width`: The desired label width, in inches. Defaults to 6. - `height`: The desired label height, in inches. Defaults to 4. - `index`: The label index (base 0). Some ZPL code will generate multiple labels, and this parameter can be used to access these different labels. Defaults to 0. -##### Example +**Important:** The `width` and `height` settings **MUST match the label dimensions used in your ZPL format**, otherwise the image will appear stretched or compressed. The `dpmm` setting should also match your printer's DPI. + +##### Example: 6x4" label at 203 DPI ```jinja - + ``` +##### Example: 4x6" label at 300 DPI +```jinja + +``` + +##### DPI Reference +| Printer Type | DPI | DPMM | +|---|---|---| +| Standard | 203 | 8 | +| High Resolution | 300 | 12 | + --- #### `get_handling_unit` @@ -182,3 +200,187 @@ Add text, barcodes, and other printable elements to a ZPL label. It takes the fo {% add_to_label(label, barcode) %} {{ label.dump_contents() }} ``` + +--- + +## ZPL Label Layout Tools + +The ZPL Layout Tools are designed to accelerate the process of creating ZPL label templates by automatically extracting text coordinates from PDF shipping label samples and generating production-ready ZPL templates with correct coordinates. 
+ +### Overview + +Instead of manually measuring and calculating ZPL dot coordinates for every label element, you can: + +1. Run the layout analysis tool against a sample PDF label +2. Get an automatically generated ZPL template with all coordinates mapped +3. Customize as needed for your specific document fields +4. Integrate into BEAM print formats + +### Command Line Tool + +The layout analysis tool is available as a standalone command-line utility at `beam/beam/zpl_layout.py`. + +#### Usage + +```bash +# Activate the virtual environment +source /path/to/env/bin/activate +cd /path/to/beam + +# Basic usage (assumes portrait PDF, 6x4" landscape output @ 300 DPI) +python beam/beam/zpl_layout.py /path/to/label.pdf + +# Specify custom label dimensions +python beam/beam/zpl_layout.py /path/to/label.pdf --width 4 --height 6 --dpi 203 + +# Disable rotation (for already-landscape PDFs) +python beam/beam/zpl_layout.py /path/to/label.pdf --no-rotate + +# Custom output directory +python beam/beam/zpl_layout.py /path/to/label.pdf --output ./my_templates/ +``` + +#### Options + +- `pdf`: Path to the PDF file to analyze (required) +- `--output, -o`: Output directory (default: creates `output/` directory next to PDF) +- `--dpi`: Target printer DPI - 203 or 300 (default: 300) +- `--width`: Label width in inches (default: 6) +- `--height`: Label height in inches (default: 4) +- `--no-rotate`: Do not rotate portrait PDF to landscape + +### Output Files + +For each PDF processed, the tool generates three files in the output directory: + +#### 1. 
`{label_name}.zpl` - Production ZPL Template + +A Jinja2-compatible ZPL template with: +- All text coordinates automatically mapped +- Sections organized (addresses, shipping info, product details, barcodes) +- Variable placeholders (e.g., `{{ doc.ship_to_name }}`) ready for customization +- Comments indicating each section and coordinate values + +Example: +```jinja +{# Shipping Label - 6.0x4.0" @ 300 DPI #} +{% set label = zebra_zpl_label(width=1800.0, length=1200.0, dpi=300) -%} + +^XA {# Start Format #} +^PW1800.0 {# Print Width: 1800.0 dots #} +^LL1200.0 {# Label Length: 1200.0 dots #} + +{# === ADDRESS SECTION === #} +{# Ship From (Left Side) #} +^FO50,150^A0N,35,35^FDShip From:^FS +^FO50,200^A0N,28,28^FB700,5,0,L,0^FD{{ doc.ship_from_name }}^FS +^FO50,250^A0N,28,28^FB700,5,0,L,0^FD{{ doc.ship_from_address }}^FS + +... + +^XZ {# End Format #} +``` + +#### 2. `{label_name}_analysis.json` - Coordinate Data + +JSON file containing detailed extraction results: +- Label dimensions in dots and DPI +- Text blocks grouped by section (main_addresses, shipping_info, product_details, etc.) +- Each block includes: + - Text content + - ZPL X,Y coordinates (in dots) + - Barcode detection flag + +Use this for reference or further customization. + +#### 3. `{label_name}_layout_map.txt` - ASCII Visual Map + +ASCII art representation of the label layout showing: +- `·` for regular text blocks +- `█` for detected barcodes +- Borders indicating label dimensions + +Useful for visually verifying that coordinates were extracted correctly. + +### Integration into BEAM Print Formats + +Once you have a generated ZPL template: + +1. **Copy the template** into a new BEAM Print Format (create via Settings > Print Format) +2. **Replace variable placeholders** with actual document field references: + - `{{ doc.ship_from_name }}` → `{{ doc.supplier_name }}` (or your actual field) + - `{{ doc.po_number }}` → `{{ doc.purchase_order_number }}` + - etc. +3. 
**Test in Labelary viewer** at https://labelary.com/viewer.html + - Copy the ZPL code (with variables replaced by test data) + - Set label size to match your printer + - Verify layout and positioning +4. **Adjust coordinates as needed** based on actual print results + +### Key Features + +- **Automatic Barcode Detection**: Identifies GS1 Application Identifiers (e.g., `(420)`) and long numeric sequences +- **Rotation Support**: Automatically converts portrait PDFs (4"×6") to landscape (6"×4") +- **Multi-DPI Support**: Works with 203 DPI and 300 DPI printers +- **Section Grouping**: Intelligently organizes extracted text into logical regions +- **Visual Feedback**: ASCII layout map shows element positions for verification + +### Coordinate System + +The tool converts between different coordinate systems: + +| System | Origin | Y-Axis | Units | Example | +|--------|--------|--------|-------|---------| +| PDF | Bottom-left | Increases upward | Points | (x0, y0) in pdfplumber | +| ZPL | Top-left | Increases downward | Dots | ^FO{x},{y} in ZPL | + +Conversion formula: `zpl_dots = pdf_points × (target_dpi / 72)` + +### DPI/DPMM Reference + +When using the `labelary_api` helper or generating ZPL templates, ensure label dimensions match across all components: + +| DPI | DPMM | Printer Type | Example | +|-----|------|--------------|---------| +| 203 | 8 | Standard Zebra | Most common thermal printers | +| 300 | 12 | High Resolution | Better quality labels | + +**Critical:** Always pass the correct `dpmm` value to `labelary_api` to avoid image stretching. If your ZPL template is 6x4" at 300 DPI but you pass `dpmm: 8`, the preview will appear stretched horizontally. 
+ +Example configurations: +- 6x4" label at 203 DPI: `labelary_api(doc, 'Format Name', {'width': 6, 'height': 4, 'dpmm': 8})` +- 4x6" label at 300 DPI: `labelary_api(doc, 'Format Name', {'width': 4, 'height': 6, 'dpmm': 12})` + +### Troubleshooting + +**Coordinates seem incorrect:** +- Verify the PDF orientation (portrait vs. landscape) +- Try with `--no-rotate` flag if PDF is already landscape +- Check that DPI matches your printer specification + +**Text not grouped correctly:** +- The section boundaries may need adjustment for non-standard label layouts +- Use the JSON analysis file to see exactly where text was detected +- Consider manually adjusting section coordinates in the generated template + +**Missing elements:** +- Some PDF elements (images, lines) may not be extracted +- pdfplumber extracts text only; complex graphics may need manual addition +- Review the layout map to identify missing elements + +### Example: Processing Trading Partner Labels + +The `label_spec/` folder contains sample PDFs from multiple trading partners. To generate templates for all: + +```bash +cd /path/to/beam +source /path/to/env/bin/activate + +# Pure Hockey (6x4 with rotation) +python beam/beam/zpl_layout.py label_spec/Pure\ Hockey\ -\ ASN\ label/*.pdf + +# Mindware (4x6 already landscape) +python beam/beam/zpl_layout.py "label_spec/Mindware - Oriental Trading Co - Carton label"/*.pdf --width 4 --height 6 --no-rotate +``` + +Templates are automatically saved to `label_spec/{partner}/output/` for easy access. 
diff --git a/beam/hooks.py b/beam/hooks.py index 82cfb93a..b00d7d1e 100644 --- a/beam/hooks.py +++ b/beam/hooks.py @@ -8,6 +8,7 @@ app_description = "Barcode Scanning for ERPNext" app_email = "support@agritheory.dev" app_license = "MIT" +required_apps = ["erpnext"] # Includes in # ------------------ @@ -31,7 +32,10 @@ # page_js = {"page" : "public/js/file.js"} # include js in doctype views -doctype_js = {"Stock Entry": "public/js/stock_entry_custom.js"} +doctype_js = { + "Network Printer Settings": "public/js/network_printer_settings_custom.js", + "Stock Entry": "public/js/stock_entry_custom.js", +} # doctype_list_js = {"doctype" : "public/js/doctype_list.js"} # doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"} # doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"} @@ -60,6 +64,7 @@ "methods": [ "beam.beam.barcodes.add_to_label", "beam.beam.barcodes.barcode128", + "beam.beam.barcodes.get_qr_code", "beam.beam.barcodes.formatted_zpl_barcode", "beam.beam.barcodes.formatted_zpl_label", "beam.beam.barcodes.formatted_zpl_text", @@ -68,6 +73,7 @@ "beam.beam.barcodes.zebra_zpl_text", "beam.beam.printing.labelary_api", "beam.beam.scan.get_handling_unit", + "beam.beam.scan.get_serial_no", ], } @@ -111,6 +117,7 @@ # Override standard doctype classes override_doctype_class = { "Sales Order": "beam.beam.overrides.sales_order.BEAMSalesOrder", + "Network Printer Settings": "beam.beam.overrides.network_printer_settings.BEAMNetworkPrinterSettings", "Stock Entry": "beam.beam.overrides.stock_entry.BEAMStockEntry", "Subcontracting Receipt": "beam.beam.overrides.subcontracting_receipt.BEAMSubcontractingReceipt", "Work Order": "beam.beam.overrides.work_order.BEAMWorkOrder", @@ -172,6 +179,11 @@ "beam.beam.demand.demand.modify_allocations", ], }, + "Company": { + "after_insert": [ + "beam.beam.overrides.company.create_company_beam_settings", + ], + }, } # Types diff --git a/beam/install.py b/beam/install.py index 001e8aeb..e76a11ee 100644 --- a/beam/install.py 
+++ b/beam/install.py @@ -9,7 +9,6 @@ from beam.beam.demand.demand import build_demand_allocation_map from beam.beam.demand.receiving import reset_build_receiving_map from beam.beam.scan.config import get_scan_doctypes -from beam.customize import load_customizations from beam.patches.v15.setup_beam_mobile_settings import execute @@ -22,7 +21,6 @@ def create_beam_mobile_user_role(): def after_install(): - load_customizations() print("Setting up Handling Unit Inventory Dimension") if frappe.db.exists("Inventory Dimension", "Handling Unit"): return diff --git a/beam/patches/.gitkeep b/beam/patches/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/beam/public/js/network_printer_settings_custom.js b/beam/public/js/network_printer_settings_custom.js new file mode 100644 index 00000000..e98acf69 --- /dev/null +++ b/beam/public/js/network_printer_settings_custom.js @@ -0,0 +1,52 @@ +// Copyright (c) 2025, AgriTheory and contributors +// For license information, please see license.txt + +let printers_cache = [] +let pending_printer_name = null + +function set_location_from_cache(frm, printer_name) { + const match = printers_cache.find(p => p.value === printer_name) + frm.set_value('printer_location', match ? match.location || '' : '') +} + +frappe.ui.form.on('Network Printer Settings', { + after_save(frm) { + // Refresh cache from CUPS so subsequent printer_name changes + // reflect the just-saved location, not stale pre-load data. 
+ printers_cache = [] + frm.trigger('connect_print_server') + }, + connect_print_server(frm) { + if (frm.doc.server_ip && frm.doc.port) { + frappe.call({ + doc: frm.doc, + method: 'get_printers_list', + args: { + ip: frm.doc.server_ip, + port: frm.doc.port, + }, + callback(data) { + printers_cache = data.message || [] + frm.fields_dict.printer_name.set_data(printers_cache) + // Resolve any pending printer_name lookup that fired before cache was ready + if (pending_printer_name) { + set_location_from_cache(frm, pending_printer_name) + pending_printer_name = null + } + }, + }) + } + }, + printer_name(frm) { + if (!frm.doc.printer_name) { + return + } + if (!printers_cache.length) { + // Cache not populated yet — queue the lookup and trigger a fetch + pending_printer_name = frm.doc.printer_name + frm.trigger('connect_print_server') + return + } + set_location_from_cache(frm, frm.doc.printer_name) + }, +}) diff --git a/beam/public/js/print/print.js b/beam/public/js/print/print.js index 70a54a49..238ffb54 100644 --- a/beam/public/js/print/print.js +++ b/beam/public/js/print/print.js @@ -40,47 +40,49 @@ function custom_print_button(frm) { if (frm.doc.docstatus != 1) { return } - frappe.db.get_value('BEAM Settings', { company: frm.doc.company }, 'enable_handling_units', r => { - if (r && r.enable_handling_units) { - frm.add_custom_button(__(' Print Handling Unit'), () => { - let d = new frappe.ui.Dialog({ - title: __('Select Printer Setting'), - fields: [ - { - label: __('Printer Setting'), - fieldname: 'printer_setting', - fieldtype: 'Link', - options: 'Network Printer Settings', - }, - { - label: __('Printer Format'), - fieldname: 'print_format', - fieldtype: 'Link', - options: 'Print Format', - get_query: function () { - return { - filters: { doc_type: 'Handling Unit' }, - } - }, - }, - ], - primary_action_label: 'Select', - primary_action(selection) { - d.hide() - frappe.call({ - method: 'beam.beam.printing.print_handling_units', - args: { - doctype: frm.doc.doctype, 
- name: frm.doc.name, - printer_setting: selection.printer_setting, - print_format: selection.print_format, - doc: frm.doc, - }, - }) + const beam_settings = frappe.boot.beam?.settings?.[frm.doc.company] + if (!beam_settings?.enable_handling_units) { + return + } + frm.add_custom_button(__(' Print Handling Unit'), () => { + let d = new frappe.ui.Dialog({ + title: __('Select Printer Setting'), + fields: [ + { + label: __('Printer Setting'), + fieldname: 'printer_setting', + fieldtype: 'Link', + options: 'Network Printer Settings', + default: frappe.defaults.get_user_default('Network Printer Settings'), + }, + { + label: __('Print Format'), + fieldname: 'print_format', + fieldtype: 'Link', + options: 'Print Format', + default: frappe.boot.beam?.default_hu_print_format, + get_query: function () { + return { + filters: { doc_type: 'Handling Unit' }, + } + }, + }, + ], + primary_action_label: 'Select', + primary_action(selection) { + d.hide() + frappe.call({ + method: 'beam.beam.printing.print_handling_units', + args: { + doctype: frm.doc.doctype, + name: frm.doc.name, + printer_setting: selection.printer_setting, + print_format: selection.print_format, + doc: frm.doc, }, }) - d.show() - }) - } + }, + }) + d.show() }) } diff --git a/beam/public/js/stock_entry_custom.js b/beam/public/js/stock_entry_custom.js index dd1c6c34..09f68642 100644 --- a/beam/public/js/stock_entry_custom.js +++ b/beam/public/js/stock_entry_custom.js @@ -23,8 +23,8 @@ frappe.ui.form.on('Stock Entry', { async function show_handling_unit_recombine_dialog(frm) { const data = await get_handling_units(frm) - if (!data) { - return new Promise(resolve => {}) + if (!data || !data.length) { + return [] } let fields = [ { @@ -35,6 +35,14 @@ async function show_handling_unit_recombine_dialog(frm) { disabled: 0, hidden: 1, }, + { + fieldtype: 'Data', + fieldname: 'target_row_name', + in_list_view: 0, + read_only: 1, + disabled: 0, + hidden: 1, + }, { fieldtype: 'Link', fieldname: 'item_code', @@ -43,6 +51,7 
@@ async function show_handling_unit_recombine_dialog(frm) { read_only: 1, disabled: 0, label: __('Item Code'), + columns: 2, }, { fieldtype: 'Data', @@ -57,6 +66,7 @@ async function show_handling_unit_recombine_dialog(frm) { label: __('Handling Unit'), in_list_view: 1, read_only: 1, + columns: 2, }, { fieldtype: 'Float', @@ -64,6 +74,7 @@ async function show_handling_unit_recombine_dialog(frm) { label: __('Remaining Qty'), in_list_view: 1, read_only: 1, + columns: 1, }, { fieldtype: 'Data', @@ -71,6 +82,7 @@ async function show_handling_unit_recombine_dialog(frm) { label: __('Handling Unit to recombine'), in_list_view: 1, read_only: 1, + columns: 2, }, { fieldtype: 'Float', @@ -78,6 +90,7 @@ async function show_handling_unit_recombine_dialog(frm) { label: __('Transferred Qty'), in_list_view: 1, read_only: 1, + columns: 1, }, ] @@ -88,10 +101,9 @@ async function show_handling_unit_recombine_dialog(frm) { { fieldname: 'handling_units', fieldtype: 'Table', - in_place_edit: false, - editable_grid: false, cannot_add_rows: true, - cannot_delete_rows: true, + cannot_delete_rows: false, + reqd: 1, data: data, get_data: () => { return data @@ -103,9 +115,14 @@ async function show_handling_unit_recombine_dialog(frm) { }, ], primary_action: () => { - let to_recombine = dialog.fields_dict.handling_units.grid.get_selected_children().map(row => { - return row.row_name - }) + let selected = dialog.fields_dict.handling_units.grid.get_selected_children() + let to_recombine = [] + for (let row of selected) { + to_recombine.push(row.row_name) + if (row.target_row_name) { + to_recombine.push(row.target_row_name) + } + } dialog.hide() return resolve(to_recombine) }, @@ -113,14 +130,39 @@ async function show_handling_unit_recombine_dialog(frm) { size: 'extra-large', }) dialog.show() + // Pre-check all rows so recombine is the default behavior + setTimeout(() => { + const grid = dialog.fields_dict.handling_units.grid + // Enable and check all rows + if (grid.wrapper) { + 
grid.wrapper.find('.grid-row-check').prop('disabled', false).prop('checked', true) + // Hide the Delete button + grid.wrapper.find('.grid-remove-rows').hide() + } + grid.grid_rows?.forEach(row => { + if (row.doc) { + row.doc.__checked = 1 + if (row.row) { + row.row.find('.grid-row-check').prop('disabled', false).prop('checked', true) + } + } + }) + grid.refresh() + }, 200) dialog.get_close_btn() }) } async function get_handling_units(frm) { let handling_units = [] + const transfer_types = ['Material Transfer', 'Send to Subcontractor', 'Material Transfer for Manufacture'] + for (const row of frm.doc.items) { - if (row.handling_unit && row.to_handling_unit) { + if (!row.handling_unit) continue + + if (transfer_types.includes(frm.doc.purpose)) { + // Material Transfer types: source and destination HU are on the same row + if (!row.to_handling_unit) continue let remaining_qty = await get_handling_unit_stock_qty(frm.doc.name, row.handling_unit, row.s_warehouse) handling_units.push({ row_name: row.name, @@ -131,8 +173,25 @@ async function get_handling_units(frm) { remaining_qty: remaining_qty, transferred_qty: row.qty, }) + } else { + // Repack/Manufacture/etc: source and target HUs are on separate rows + // Only show source rows (those with s_warehouse); pair with matching target row + if (!row.s_warehouse) continue + let target_row = frm.doc.items.find(r => r.t_warehouse && r.handling_unit && r.item_code === row.item_code) + let remaining_qty = await get_handling_unit_stock_qty(frm.doc.name, row.handling_unit, row.s_warehouse) + handling_units.push({ + row_name: row.name, + target_row_name: target_row?.name || '', + item_code: row.item_code, + item_name: row.item_name, + handling_unit: row.handling_unit, + to_handling_unit: target_row?.handling_unit || '', + remaining_qty: remaining_qty, + transferred_qty: row.transfer_qty || row.qty, + }) } } + return handling_units } async function get_handling_unit_stock_qty(name, handling_unit, s_warehouse) { @@ -146,6 +205,10 @@ 
async function get_handling_unit_stock_qty(name, handling_unit, s_warehouse) { //re combine async function set_recombine_handling_units(frm) { + // const beam_settings = frappe.boot.beam?.settings?.[frm.doc.company] + // if (!beam_settings?.enable_handling_units) { + // return + // } let to_recombine = await show_handling_unit_recombine_dialog(frm) await frappe.xcall('beam.beam.overrides.stock_entry.set_rows_to_recombine', { docname: frm.doc.name, diff --git a/beam/tests/fixtures.py b/beam/tests/fixtures.py index bf60c591..19e8e484 100644 --- a/beam/tests/fixtures.py +++ b/beam/tests/fixtures.py @@ -370,6 +370,19 @@ "default_warehouse": "Kitchen - APC", "supplier": "Freedom Provisions", }, + { + "item_code": "Whipped Cream Canister", + "uom": "Nos", + "item_group": "Bakery Supplies", + "default_warehouse": "Storeroom - APC", + "description": "Pressurized whipped cream canister for serving pies; also sold retail.", + "item_price": 2.75, + "supplier": "Unity Bakery Supply", + "is_sales_item": 1, + "is_purchase_item": 1, + "has_serial_no": 1, + "serial_no_series": "WCC-.#####", + }, ] boms = [ @@ -715,7 +728,7 @@ }, "phone": "(704) 885-0542", "roles": ["Stock Manager", "Item Manager"], - "department": "Management", + "department": "Management - APC", "designation": "Bakery Manager", }, { @@ -732,7 +745,7 @@ "phone": "(658) 583-5499", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, { @@ -749,7 +762,7 @@ "phone": "(962) 762-5895", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, { @@ -766,7 +779,7 @@ "phone": "(366) 357-8223", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Bakery Manager", }, { @@ -783,7 +796,7 @@ 
"phone": "(930) 920-4520", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, { @@ -800,7 +813,7 @@ "phone": "(054) 893-8970", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, { @@ -817,7 +830,7 @@ "phone": "(814) 677-9322", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, { @@ -834,7 +847,7 @@ "phone": "(133) 195-7828", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, { @@ -851,7 +864,7 @@ "phone": "(041) 000-2569", "roles": ["Stock User", "BEAM Mobile User"], "reports_to": "Tristan Hawkins", - "department": "Operations", + "department": "Operations - APC", "designation": "Baker", }, ] diff --git a/beam/tests/setup.py b/beam/tests/setup.py index a437efd0..c17f8369 100644 --- a/beam/tests/setup.py +++ b/beam/tests/setup.py @@ -192,8 +192,11 @@ def setup_manufacturing_settings(settings): def setup_beam_settings(settings): - beams = frappe.new_doc("BEAM Settings") - beams.company = settings.company + if frappe.db.exists("BEAM Settings", settings.company): + beams = frappe.get_doc("BEAM Settings", settings.company) + else: + beams = frappe.new_doc("BEAM Settings") + beams.company = settings.company beams.enable_demand = True beams.enable_handling_units = True beams.receiving_workstation = "Receiving" @@ -300,8 +303,15 @@ def create_items(settings): "Purchase" if item.get("item_group") in ("Bakery Supplies", "Ingredients") else "Manufacture" ) i.valuation_method = "FIFO" - i.is_purchase_item = item.get("item_group") in ("Bakery Supplies", "Ingredients") - i.is_sales_item = 
item.get("item_group") == "Baked Goods" + i.is_purchase_item = ( + 1 + if item.get("item_group") in ("Bakery Supplies", "Ingredients") + or item.get("is_purchase_item", 0) + else 0 + ) + i.is_sales_item = ( + 1 if item.get("item_group") == "Baked Goods" or item.get("is_sales_item", 0) else 0 + ) i.append( "item_defaults", {"company": settings.company, "default_warehouse": item.get("default_warehouse")}, @@ -315,6 +325,9 @@ def create_items(settings): i.append("uoms", {"uom": "Gallon Liquid (US)", "conversion_factor": 15.142}) i.purchase_uom = "Gallon Liquid (US)" i.valuation_rate = 0.01 if i.item_code == "Water" else 0.02 + + i.has_serial_no = item.get("has_serial_no", 0) or 0 + i.serial_no_series = item.get("serial_no_series", "") or "" i.save() if item.get("item_price"): ip = frappe.new_doc("Item Price") @@ -692,9 +705,12 @@ def create_production_plan(settings, prod_plan_from_doc): wo.save() wo.submit() frappe.db.set_value("Work Order", wo.name, "creation", start_time) - job_cards = frappe.get_all("Job Card", {"work_order": wo.name}) - for job_card in job_cards: - job_card = frappe.get_doc("Job Card", job_card) + # Get job cards and sort by sequence_id to process in order + job_cards = frappe.get_all( + "Job Card", {"work_order": wo.name}, ["name", "sequence_id"], order_by="sequence_id asc" + ) + for jc in job_cards: + job_card = frappe.get_doc("Job Card", jc.name) batch_size, total_operation_time = frappe.get_value( "Operation", job_card.operation, ["batch_size", "total_operation_time"] ) @@ -709,6 +725,8 @@ def create_production_plan(settings, prod_plan_from_doc): "remaining_time_in_mins": time_in_mins, }, ) + # Complete the job card + job_card.total_completed_qty = wo.qty job_card.save() start_time = job_card.time_logs[0].to_time + datetime.timedelta(minutes=2) # job_card.submit() # TODO: don't submit for demand tests diff --git a/beam/tests/test_barcode_auto_generate.py b/beam/tests/test_barcode_auto_generate.py new file mode 100644 index 00000000..1115a780 
--- /dev/null +++ b/beam/tests/test_barcode_auto_generate.py @@ -0,0 +1,74 @@ +# Copyright (c) 2025, AgriTheory and contributors +# For license information, please see license.txt + +import frappe +import pytest + +from beam.beam.barcodes import create_beam_barcode +from beam.beam.doctype.beam_settings.beam_settings import get_doctypes_with_item_barcodes + + +def test_get_doctypes_with_item_barcodes(): + doctypes = get_doctypes_with_item_barcodes() + assert isinstance(doctypes, list) + assert "Item" in doctypes + assert "Warehouse" in doctypes + # all returned values must be real doctypes + for dt in doctypes: + assert frappe.db.exists("DocType", dt), f"Stale DocField reference: '{dt}' does not exist" + + +def _make_item(item_code): + if frappe.db.exists("Item", item_code): + item = frappe.get_doc("Item", item_code) + item.barcodes = [] + return item + item = frappe.new_doc("Item") + item.item_code = item_code + item.item_name = item_code + item.item_group = "All Item Groups" + item.stock_uom = "Nos" + item.is_stock_item = 1 + return item + + +@pytest.fixture() +def beam_settings(): + company = frappe.defaults.get_defaults().get("company") + settings = frappe.get_doc("BEAM Settings", {"company": company}) + original = settings.auto_barcode_doctypes + yield settings + settings.auto_barcode_doctypes = original + settings.save() + + +def test_barcode_generated_when_doctype_allowed(beam_settings): + beam_settings.auto_barcode_doctypes = '["Item", "Warehouse"]' + beam_settings.save() + + item = _make_item("_Test Barcode Allow Item") + create_beam_barcode(item) + + assert any(b.barcode_type == "Code128" for b in item.barcodes) + + +def test_barcode_not_generated_when_doctype_not_allowed(beam_settings): + beam_settings.auto_barcode_doctypes = '["Warehouse"]' + beam_settings.save() + + item = _make_item("_Test Barcode Disallow Item") + create_beam_barcode(item) + + assert not any(b.barcode_type == "Code128" for b in item.barcodes) + + +def 
test_barcode_not_duplicated_when_code128_exists(beam_settings): + beam_settings.auto_barcode_doctypes = '["Item", "Warehouse"]' + beam_settings.save() + + item = _make_item("_Test Barcode Dedup Item") + item.append("barcodes", {"barcode": "12345678901234567890", "barcode_type": "Code128"}) + create_beam_barcode(item) + + code128_barcodes = [b for b in item.barcodes if b.barcode_type == "Code128"] + assert len(code128_barcodes) == 1 diff --git a/beam/tests/test_handling_unit.py b/beam/tests/test_handling_unit.py index 68b73639..c02386f9 100644 --- a/beam/tests/test_handling_unit.py +++ b/beam/tests/test_handling_unit.py @@ -23,6 +23,85 @@ def submit_all_purchase_receipts(): @pytest.mark.order(10) +def test_enable_handling_units_setting(): + """Test that enable_handling_units setting controls whether handling units are assigned to SLEs""" + company = frappe.defaults.get_defaults().get("company") + + # Test with enable_handling_units = False (default) + beam_settings = frappe.get_doc("BEAM Settings", {"company": company}) + original_value = beam_settings.enable_handling_units + beam_settings.enable_handling_units = 0 + beam_settings.save() + + try: + se_disabled = frappe.new_doc("Stock Entry") + se_disabled.stock_entry_type = se_disabled.purpose = "Material Receipt" + se_disabled.company = company + se_disabled.append( + "items", + { + "item_code": "Ambrosia Pie", + "qty": 10, + "t_warehouse": "Baked Goods - APC", + "basic_rate": frappe.get_value("Item Price", {"item_code": "Ambrosia Pie"}, "price_list_rate"), + }, + ) + se_disabled.save() + se_disabled.submit() + + # When disabled, handling_unit should NOT be generated + item_row = se_disabled.items[0] + assert ( + not item_row.handling_unit + ), f"Item row should not have handling_unit when setting is disabled, but got: {item_row.handling_unit}" + + # Check SLE - handling_unit should also NOT be set + sle_disabled = frappe.get_doc("Stock Ledger Entry", {"voucher_detail_no": item_row.name}) + assert ( + not 
sle_disabled.handling_unit or sle_disabled.handling_unit == "" + ), f"SLE should not have handling_unit when enable_handling_units is disabled, but got: {sle_disabled.handling_unit}" + + # Now test with enable_handling_units = True + beam_settings.enable_handling_units = 1 + beam_settings.save() + + se_enabled = frappe.new_doc("Stock Entry") + se_enabled.stock_entry_type = se_enabled.purpose = "Material Receipt" + se_enabled.company = company + se_enabled.append( + "items", + { + "item_code": "Ambrosia Pie", + "qty": 10, + "t_warehouse": "Baked Goods - APC", + "basic_rate": frappe.get_value("Item Price", {"item_code": "Ambrosia Pie"}, "price_list_rate"), + }, + ) + se_enabled.save() + se_enabled.submit() + + # When enabled, handling_unit should be generated on item row + item_row_enabled = se_enabled.items[0] + assert ( + item_row_enabled.handling_unit + ), "Item row should have handling_unit when setting is enabled" + + # Check SLE - handling_unit SHOULD be set when enabled + sle_enabled = frappe.get_doc("Stock Ledger Entry", {"voucher_detail_no": item_row_enabled.name}) + assert ( + sle_enabled.handling_unit + ), "SLE should have handling_unit when enable_handling_units is enabled" + assert ( + sle_enabled.handling_unit == item_row_enabled.handling_unit + ), f"SLE handling_unit should match item row: {sle_enabled.handling_unit} != {item_row_enabled.handling_unit}" + + finally: + # Restore original setting + beam_settings.enable_handling_units = original_value + beam_settings.save() + + +@pytest.mark.order(1) def test_purchase_receipt_handling_unit_generation(): for pr in frappe.get_all("Purchase Receipt"): pr = frappe.get_doc("Purchase Receipt", pr) @@ -219,9 +298,11 @@ def test_stock_entry_for_manufacture(): se_tfm = frappe.get_value( "Stock Entry", {"work_order": wo, "purpose": "Material Transfer for Manufacture"} ) - job_cards = frappe.get_all("Job Card", {"work_order": wo}) - for job_card in job_cards: - job_card = frappe.get_doc("Job Card", job_card) + 
job_cards = frappe.get_all( + "Job Card", {"work_order": wo}, ["name", "sequence_id"], order_by="sequence_id asc" + ) + for jc in job_cards: + job_card = frappe.get_doc("Job Card", jc.name) # Complete the job card by setting completed qty equal to qty to manufacture for time_log in job_card.time_logs: time_log.completed_qty = job_card.for_quantity @@ -483,7 +564,11 @@ def test_stock_entry_material_transfer(): "Item", row.item_code, "enable_handling_unit" ): continue - sle = frappe.get_doc("Stock Ledger Entry", {"handling_unit": row.handling_unit}) + # For Material Transfer, there are two SLEs - one for source (negative) and one for target (positive) + # Get the source warehouse SLE (the one consuming from the handling unit) + sle = frappe.get_doc( + "Stock Ledger Entry", {"handling_unit": row.handling_unit, "warehouse": row.s_warehouse} + ) hu = get_handling_unit(str(row.handling_unit)) assert row.transfer_qty == abs(sle.actual_qty) assert hu.stock_qty == 95 # net qty @@ -701,3 +786,223 @@ def test_handling_units_overconsumption_in_delivery_note(): f"Row #1: Handling Unit for Ambrosia Pie cannot be more than {hu.stock_qty} {hu.stock_uom}. 
You have {row_qty:.1f} {row_stock_uom}" in exc_info.value.args[0] ) + + +@pytest.mark.order(15) +def test_repack_cancel_without_recombine(): + """Test cancelling a Repack Stock Entry without recombining handling units""" + # Create a material receipt with a known handling unit + se_receipt = frappe.new_doc("Stock Entry") + se_receipt.stock_entry_type = se_receipt.purpose = "Material Receipt" + se_receipt.append( + "items", + { + "item_code": "Parchment Paper", + "qty": 100, + "t_warehouse": "Storeroom - APC", + "basic_rate": frappe.get_value( + "Item Price", {"item_code": "Parchment Paper"}, "price_list_rate" + ), + }, + ) + se_receipt.save() + se_receipt.submit() + source_hu = se_receipt.items[0].handling_unit + + # Create a repack entry + se_repack = frappe.new_doc("Stock Entry") + se_repack.stock_entry_type = se_repack.purpose = "Repack" + se_repack.append( + "items", + { + "item_code": "Parchment Paper", + "qty": 1, + "uom": "Box", + "conversion_factor": 100, + "stock_qty": 100, + "actual_qty": 100, + "transfer_qty": 100, + "s_warehouse": "Storeroom - APC", + "handling_unit": source_hu, + }, + ) + se_repack.append( + "items", + { + "item_code": "Parchment Paper", + "uom": "Nos", + "qty": 100, + "actual_qty": 100, + "transfer_qty": 100, + "t_warehouse": "Storeroom - APC", + }, + ) + se_repack.save() + se_repack.submit() + + source_row = se_repack.items[0] + target_row = se_repack.items[1] + target_hu = target_row.handling_unit + + # Verify initial state + source_hu_doc = get_handling_unit(source_hu) + target_hu_doc = get_handling_unit(target_hu) + assert source_hu_doc.stock_qty == 0 # consumed + assert target_hu_doc.stock_qty == 100 # created + + # Cancel WITHOUT recombine (don't set recombine_on_cancel) + se_repack.cancel() + + # After cancel without recombine: + # - Source HU should have qty 0 (consumed stays consumed) + # - Target HU should still exist with qty 100 (produced stays produced) + # This "keep separate" behavior maintains the split in cancelled 
state + source_hu_doc = get_handling_unit(source_hu) + target_hu_doc = get_handling_unit(target_hu) + assert source_hu_doc.stock_qty == 0 # consumed + assert target_hu_doc.stock_qty == 100 # produced + + +@pytest.mark.order(16) +def test_repack_cancel_with_recombine(): + """Test cancelling a Repack Stock Entry WITH recombining handling units""" + # Create a material receipt with a known handling unit + se_receipt = frappe.new_doc("Stock Entry") + se_receipt.stock_entry_type = se_receipt.purpose = "Material Receipt" + se_receipt.append( + "items", + { + "item_code": "Parchment Paper", + "qty": 100, + "t_warehouse": "Storeroom - APC", + "basic_rate": frappe.get_value( + "Item Price", {"item_code": "Parchment Paper"}, "price_list_rate" + ), + }, + ) + se_receipt.save() + se_receipt.submit() + source_hu = se_receipt.items[0].handling_unit + + # Create a repack entry + se_repack = frappe.new_doc("Stock Entry") + se_repack.stock_entry_type = se_repack.purpose = "Repack" + se_repack.append( + "items", + { + "item_code": "Parchment Paper", + "qty": 1, + "uom": "Box", + "conversion_factor": 100, + "stock_qty": 100, + "actual_qty": 100, + "transfer_qty": 100, + "s_warehouse": "Storeroom - APC", + "handling_unit": source_hu, + }, + ) + se_repack.append( + "items", + { + "item_code": "Parchment Paper", + "uom": "Nos", + "qty": 100, + "actual_qty": 100, + "transfer_qty": 100, + "t_warehouse": "Storeroom - APC", + }, + ) + se_repack.save() + se_repack.submit() + + source_row = se_repack.items[0] + target_row = se_repack.items[1] + target_hu = target_row.handling_unit + + # Set recombine_on_cancel on BOTH rows (as the frontend does) + source_row.db_set("recombine_on_cancel", True) + target_row.db_set("recombine_on_cancel", True) + + # Cancel WITH recombine + se_repack.reload() + se_repack.cancel() + + # After cancel with recombine: + # - Source HU should NOT get additional entries (recombine prevents split) + # - Target HU should NOT exist (was recombined back) + source_hu_doc = 
get_handling_unit(source_hu) + target_hu_doc = get_handling_unit(target_hu) + + # Source HU should have the original quantity (no split entries added) + assert source_hu_doc.stock_qty == 100 + # Target HU should be empty/zero (recombined back to source) + assert target_hu_doc is None or target_hu_doc.stock_qty == 0 + + +@pytest.mark.order(17) +def test_material_transfer_cancel_without_recombine(): + """Test cancelling a Material Transfer Stock Entry without recombining handling units""" + # Create a material receipt + se_receipt = frappe.new_doc("Stock Entry") + se_receipt.stock_entry_type = se_receipt.purpose = "Material Receipt" + se_receipt.append( + "items", + { + "item_code": "Parchment Paper", + "qty": 100, + "t_warehouse": "Storeroom - APC", + "basic_rate": frappe.get_value( + "Item Price", {"item_code": "Parchment Paper"}, "price_list_rate" + ), + }, + ) + se_receipt.save() + se_receipt.submit() + source_hu = se_receipt.items[0].handling_unit + + # Create a material transfer + se_transfer = frappe.new_doc("Stock Entry") + se_transfer.stock_entry_type = se_transfer.purpose = "Material Transfer" + se_transfer.company = frappe.defaults.get_defaults().get("company") + + scan = frappe.call( + "beam.beam.scan.scan", + **{ + "barcode": str(source_hu), + "context": {"frm": "Stock Entry", "doc": se_transfer.as_dict()}, + "current_qty": 1, + }, + ) + se_transfer.append( + "items", + { + **scan[0]["context"], + "qty": 50, + "actual_qty": 50, + "transfer_qty": 50, + "s_warehouse": "Storeroom - APC", + "t_warehouse": "Kitchen - APC", + }, + ) + se_transfer.save() + se_transfer.submit() + + transfer_row = se_transfer.items[0] + target_hu = transfer_row.to_handling_unit + + # Verify initial state + source_hu_doc = get_handling_unit(source_hu) + target_hu_doc = get_handling_unit(target_hu) + assert source_hu_doc.stock_qty == 50 # remaining in source + assert target_hu_doc.stock_qty == 50 # transferred to target + + # Cancel WITHOUT recombine + se_transfer.cancel() + + # 
After cancel without recombine: + # - Source HU should be restored + # - Target HU should also be restored (both persist separately) + source_hu_doc = get_handling_unit(source_hu) + target_hu_doc = get_handling_unit(target_hu) + assert source_hu_doc.stock_qty == 50 # restored in source warehouse + assert target_hu_doc.stock_qty == 50 # restored in target warehouse diff --git a/beam/tests/test_item_barcode_print_format.py b/beam/tests/test_item_barcode_print_format.py new file mode 100644 index 00000000..46ce6a9e --- /dev/null +++ b/beam/tests/test_item_barcode_print_format.py @@ -0,0 +1,23 @@ +# Copyright (c) 2025, AgriTheory and contributors +# Test for barcode generation in Item Barcode print format + +import pytest + +from beam.beam.barcodes import barcode128 + + +@pytest.mark.parametrize("barcode_text", ["123456789012", "ITEM-00001", "987654321098"]) +def test_item_barcode_print_format(barcode_text): + # Generate barcode image in print format + img_html = barcode128(barcode_text) + assert img_html.startswith('') + # Optionally, check that the base64 string decodes to PNG + import base64 + import re + + match = re.search(r"data:image/png;base64,([A-Za-z0-9+/=]+)", img_html) + assert match, "No base64 PNG found in img tag" + png_bytes = base64.b64decode(match.group(1)) + assert png_bytes[:8] == b"\x89PNG\r\n\x1a\n", "Not a PNG file" diff --git a/beam/tests/test_printing.py b/beam/tests/test_printing.py new file mode 100644 index 00000000..e2f6f6a0 --- /dev/null +++ b/beam/tests/test_printing.py @@ -0,0 +1,85 @@ +# Copyright (c) 2025, AgriTheory and contributors +# For license information, please see license.txt + +from unittest.mock import Mock, patch + +import frappe +from frappe.exceptions import DoesNotExistError + +from beam.beam.printing import print_by_server + + +def test_print_by_server_empty_string_uses_standard(): + """Empty print_format should default to Standard""" + mock_cups = Mock() + mock_cups.IPPError = Exception + with 
patch("beam.beam.printing.cups", mock_cups): + try: + print_by_server( + doctype="Item", + name="Ambrosia Pie", + printer_setting="Kitchen Printer", + print_format="", + ) + except DoesNotExistError as e: + # Should fail trying to get "Standard" print format + assert "Standard" in str(e) + + +def test_print_by_server_none_uses_standard(): + """None print_format should default to Standard""" + mock_cups = Mock() + mock_cups.IPPError = Exception + with patch("beam.beam.printing.cups", mock_cups): + try: + print_by_server( + doctype="Item", + name="Ambrosia Pie", + printer_setting="Kitchen Printer", + print_format=None, + ) + except DoesNotExistError as e: + # Should fail trying to get "Standard" print format + assert "Standard" in str(e) + + +def test_print_by_server_explicit_format(): + """Explicit print_format should be used""" + from beam.beam.printing import print_by_server + + mock_cups = Mock() + mock_cups.IPPError = Exception + with patch("beam.beam.printing.cups", mock_cups): + try: + print_by_server( + doctype="Item", + name="Ambrosia Pie", + printer_setting="Kitchen Printer", + print_format="Item Barcode", + ) + except Exception as e: + # Should NOT fail on "Standard" - should use explicit format + assert "Standard" not in str(e), "Should use explicit format, not Standard" + + +def test_print_by_server_with_serialized_doc(): + """Serialized doc should be properly deserialized as full document instance""" + # Get a real item doc and serialize it like the frontend would + item = frappe.get_doc("Item", "Ambrosia Pie") + serialized_doc = frappe.as_json(item.as_dict()) + + mock_cups = Mock() + mock_cups.IPPError = Exception + with patch("beam.beam.printing.cups", mock_cups): + try: + print_by_server( + doctype="Item", + name="Ambrosia Pie", + printer_setting="Kitchen Printer", + print_format="Item Barcode", + doc=serialized_doc, # Pass as JSON string + ) + except Exception as e: + # Should not fail with AttributeError about 'in_print' + assert "in_print" not in 
str(e) + assert not isinstance(e, AttributeError) diff --git a/beam/tests/test_serial_number.py b/beam/tests/test_serial_number.py new file mode 100644 index 00000000..a304a8ba --- /dev/null +++ b/beam/tests/test_serial_number.py @@ -0,0 +1,134 @@ +# Copyright (c) 2025, AgriTheory and contributors +# For license information, please see license.txt + +import frappe +import pytest +from frappe.utils import today + + +def _make_serials(series="WCC-.#####", qty=1): + from frappe.model.naming import make_autoname + + return [make_autoname(series) for _ in range(qty)] + + +@pytest.mark.order(20) +def test_serial_number_scan(): + warehouse = "Storeroom - APC" + supplier = "Unity Bakery Supply" + item_code = "Whipped Cream Canister" + serials = _make_serials(qty=3) + pr = frappe.get_doc( + { + "doctype": "Purchase Receipt", + "supplier": supplier, + "posting_date": today(), + "items": [ + { + "item_code": item_code, + "qty": 1, + "received_qty": 1, + "rate": 10, + "warehouse": warehouse, + "serial_no": serials[0], + "use_serial_batch_fields": 1, + } + ], + } + ) + pr.save() + pr.submit() + + # Serial No scanning disabled + company = frappe.defaults.get_defaults().get("company") + settings = frappe.get_doc("BEAM Settings", {"company": company}) + settings.scan_serial_no = 0 + settings.save() + assert settings.scan_serial_no == 0 + scan = frappe.call( + "beam.beam.scan.scan", + **{"barcode": str(serials[0]), "context": {"listview": "Purchase Receipt"}} + ) + assert scan is None + + # Serial No scanning enabled + settings.scan_serial_no = 1 + settings.save() + + assert settings.scan_serial_no == 1 + scan = frappe.call( + "beam.beam.scan.scan", + **{"barcode": str(serials[0]), "context": {"listview": "Purchase Receipt"}} + ) + assert scan[0]["action"] == "route" + assert scan[0]["doctype"] == "Purchase Receipt" + assert scan[0]["field"] == "Purchase Receipt" + assert scan[0]["target"] == pr.name + + pi = frappe.get_doc( + { + "doctype": "Purchase Invoice", + "supplier": 
supplier, + "posting_date": today(), + "update_stock": 1, + "items": [ + { + "item_code": item_code, + "qty": 1, + "received_qty": 1, + "rate": 10, + "warehouse": warehouse, + "serial_no": serials[1], + "use_serial_batch_fields": 1, + } + ], + } + ) + pi.save() + pi.submit() + + company = frappe.defaults.get_defaults().get("company") + settings = frappe.get_doc("BEAM Settings", {"company": company}) + settings.scan_serial_no = 1 + settings.save() + scan = frappe.call( + "beam.beam.scan.scan", + **{"barcode": str(serials[1]), "context": {"listview": "Purchase Invoice"}} + ) + assert scan[0]["action"] == "filter" + assert scan[0]["doctype"] == "Purchase Invoice" + assert scan[0]["field"] == "name" + assert scan[0]["target"] == pi.name + + dn = frappe.get_doc( + { + "doctype": "Delivery Note", + "customer": "Longwoods Sandwich Shop", + "posting_date": today(), + "items": [ + { + "item_code": item_code, + "qty": 1, + "received_qty": 1, + "rate": 10, + "warehouse": warehouse, + "serial_no": serials[1], + "use_serial_batch_fields": 1, + } + ], + } + ) + dn.save() + dn.submit() + + company = frappe.defaults.get_defaults().get("company") + settings = frappe.get_doc("BEAM Settings", {"company": company}) + settings.scan_serial_no = 1 + settings.save() + scan = frappe.call( + "beam.beam.scan.scan", **{"barcode": str(serials[1]), "context": {"listview": "Delivery Note"}} + ) + assert scan[0]["action"] == "filter" + assert scan[0]["doctype"] == "Delivery Note" + assert scan[0]["field"] == "name" + assert scan[0]["target"] == dn.name diff --git a/poetry.lock b/poetry.lock index c2d72381..622b215d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,130 +1,138 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. [[package]] name = "certifi" -version = "2024.8.30" +version = "2026.2.25" description = "Python package for providing Mozilla's CA Bundle." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"}, + {file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"}, ] [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = 
"charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = 
"charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = 
"charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = 
"charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = 
"sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = 
"sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] [[package]] @@ -608,14 +616,14 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] @@ -655,4 +663,4 @@ resolved_reference = "45ffc60638814df575d9fe11c7504b1a533e4ecb" [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "f125f1cb02654c1352139afe242a3336876573fdc1e9222a99eb04648802cc17" +content-hash = "943528828f8114492fa2ba8112b91b96557d0182e2507a3c8627db881cb22e8e" diff --git a/pyproject.toml b/pyproject.toml index 61fbe324..9b3c7c8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,11 @@ [project] name = "beam" +version = "15.8.0" +authors = [ + { name = "AgriTheory", email = "support@agritheory.dev" } +] description = "Barcode Scanning for ERPNext" -authors = [{name = "AgriTheory", email = "support@agritheory.dev"}] +requires-python = ">=3.10" readme = "README.md" license = 
{ file = "LICENSE" } dynamic = [ "version", "dependencies", "requires-python" ] @@ -14,6 +18,11 @@ python = ">=3.10,<3.14" python-barcode = "^0.15.1" zebra-zpl = {git = "https://github.com/mtking2/py-zebra-zpl.git"} +[tool.bench.dev-dependencies] +pytest = "~=8.3.2" +pytest-cov = "~=5.0.0" +pytest-order = "~=1.2.1" + [tool.poetry.group.dev.dependencies] pytest = "^8.4.1" pytest-cov = "^6.2.1" @@ -24,6 +33,10 @@ pytest-playwright = "^0.7.0" requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = "poetry.core.masonry.api" +[tool.bench.frappe-dependencies] +frappe = ">=15.0.0,<16.0.0" +erpnext = ">=15.0.0,<16.0.0" + [tool.pytest.ini_options] addopts = "--cov=beam --cov-report term-missing" @@ -75,4 +88,4 @@ version_variable = [ ] [tool.semantic_release.branches.version] -match = "version-15" +match = "version-15" \ No newline at end of file diff --git a/setup.py b/setup.py index ddc3c973..09463f34 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,12 @@ -# Copyright (c) 2025, AgriTheory and contributors +# Copyright (c) 2026, AgriTheory and contributors # For license information, please see license.txt -from setuptools import setup +from setuptools import find_packages, setup -name = "beam" - -setup() +setup( + name="beam", + version="14.8.7", + packages=find_packages(), + include_package_data=True, + zip_safe=False, +)