Merge pull request #29907 from deepeshgarg007/tds_payable_total_amount
fix: Total Credit amount in TDS Payable monthly report
diff --git a/.mergify.yml b/.mergify.yml
index f3d0409..b7d1df4 100644
--- a/.mergify.yml
+++ b/.mergify.yml
@@ -14,9 +14,39 @@
close:
comment:
message: |
- @{{author}}, thanks for the contribution, but we do not accept pull requests on a stable branch. Please raise PR on an appropriate hotfix branch.
+ @{{author}}, thanks for the contribution, but we do not accept pull requests on a stable branch. Please raise a PR against the appropriate hotfix branch.
https://github.com/frappe/erpnext/wiki/Pull-Request-Checklist#which-branch
+ - name: backport to develop
+ conditions:
+ - label="backport develop"
+ actions:
+ backport:
+ branches:
+ - develop
+ assignees:
+ - "{{ author }}"
+
+ - name: backport to version-14-hotfix
+ conditions:
+ - label="backport version-14-hotfix"
+ actions:
+ backport:
+ branches:
+ - version-14-hotfix
+ assignees:
+ - "{{ author }}"
+
+ - name: backport to version-14-pre-release
+ conditions:
+ - label="backport version-14-pre-release"
+ actions:
+ backport:
+ branches:
+ - version-14-pre-release
+ assignees:
+ - "{{ author }}"
+
- name: backport to version-13-hotfix
conditions:
- label="backport version-13-hotfix"
@@ -55,4 +85,4 @@
branches:
- version-12-pre-release
assignees:
- - "{{ author }}"
\ No newline at end of file
+ - "{{ author }}"
diff --git a/erpnext/accounts/doctype/cash_flow_mapping_template_details/cash_flow_mapping_template_details.json b/erpnext/accounts/doctype/cash_flow_mapping_template_details/cash_flow_mapping_template_details.json
index 22cf797..02c6875 100644
--- a/erpnext/accounts/doctype/cash_flow_mapping_template_details/cash_flow_mapping_template_details.json
+++ b/erpnext/accounts/doctype/cash_flow_mapping_template_details/cash_flow_mapping_template_details.json
@@ -1,94 +1,34 @@
{
- "allow_copy": 0,
- "allow_guest_to_view": 0,
- "allow_import": 0,
- "allow_rename": 0,
- "autoname": "field:mapping",
- "beta": 0,
- "creation": "2018-02-08 10:18:48.513608",
- "custom": 0,
- "docstatus": 0,
- "doctype": "DocType",
- "document_type": "",
- "editable_grid": 1,
- "engine": "InnoDB",
+ "actions": [],
+ "creation": "2018-02-08 10:18:48.513608",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "mapping"
+ ],
"fields": [
{
- "allow_bulk_edit": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fieldname": "mapping",
- "fieldtype": "Link",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 1,
- "in_standard_filter": 0,
- "label": "Mapping",
- "length": 0,
- "no_copy": 0,
- "options": "Cash Flow Mapping",
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 1,
- "search_index": 0,
- "set_only_once": 0,
- "unique": 0
+ "fieldname": "mapping",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Mapping",
+ "options": "Cash Flow Mapping",
+ "reqd": 1,
+ "unique": 1
}
- ],
- "has_web_view": 0,
- "hide_heading": 0,
- "hide_toolbar": 0,
- "idx": 0,
- "image_view": 0,
- "in_create": 0,
- "is_submittable": 0,
- "issingle": 0,
- "istable": 0,
- "max_attachments": 0,
- "modified": "2018-02-08 10:33:39.413930",
- "modified_by": "Administrator",
- "module": "Accounts",
- "name": "Cash Flow Mapping Template Details",
- "name_case": "",
- "owner": "Administrator",
- "permissions": [
- {
- "amend": 0,
- "apply_user_permissions": 0,
- "cancel": 0,
- "create": 1,
- "delete": 1,
- "email": 1,
- "export": 1,
- "if_owner": 0,
- "import": 0,
- "permlevel": 0,
- "print": 1,
- "read": 1,
- "report": 1,
- "role": "System Manager",
- "set_user_permissions": 0,
- "share": 1,
- "submit": 0,
- "write": 1
- }
- ],
- "quick_entry": 1,
- "read_only": 0,
- "read_only_onload": 0,
- "show_name_in_global_search": 0,
- "sort_field": "modified",
- "sort_order": "DESC",
- "track_changes": 1,
- "track_seen": 0
+ ],
+ "istable": 1,
+ "links": [],
+ "modified": "2022-02-21 03:34:57.902332",
+ "modified_by": "Administrator",
+ "module": "Accounts",
+ "name": "Cash Flow Mapping Template Details",
+ "owner": "Administrator",
+ "permissions": [],
+ "quick_entry": 1,
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "states": [],
+ "track_changes": 1
}
\ No newline at end of file
diff --git a/erpnext/accounts/report/consolidated_financial_statement/consolidated_financial_statement.py b/erpnext/accounts/report/consolidated_financial_statement/consolidated_financial_statement.py
index 758e3e9..1e20f7b 100644
--- a/erpnext/accounts/report/consolidated_financial_statement/consolidated_financial_statement.py
+++ b/erpnext/accounts/report/consolidated_financial_statement/consolidated_financial_statement.py
@@ -354,9 +354,6 @@
if d.parent_account:
account = d.parent_account_name
- # if not accounts_by_name.get(account):
- # continue
-
for company in companies:
accounts_by_name[account][company] = \
accounts_by_name[account].get(company, 0.0) + d.get(company, 0.0)
@@ -367,7 +364,7 @@
accounts_by_name[account].get("opening_balance", 0.0) + d.get("opening_balance", 0.0)
def get_account_heads(root_type, companies, filters):
- accounts = get_accounts(root_type, filters)
+ accounts = get_accounts(root_type, companies)
if not accounts:
return None, None, None
@@ -396,7 +393,7 @@
for account in accounts:
if account.parent_account:
- account["parent_account_name"] = name_to_account_map[account.parent_account]
+ account["parent_account_name"] = name_to_account_map.get(account.parent_account)
return accounts
@@ -419,12 +416,19 @@
return frappe.db.sql_list("""select name from `tabCompany`
where lft >= {0} and rgt <= {1} order by lft, rgt""".format(lft, rgt))
-def get_accounts(root_type, filters):
- return frappe.db.sql(""" select name, is_group, company,
- parent_account, lft, rgt, root_type, report_type, account_name, account_number
- from
- `tabAccount` where company = %s and root_type = %s
- """ , (filters.get('company'), root_type), as_dict=1)
+def get_accounts(root_type, companies):
+ accounts = []
+ added_accounts = []
+
+ for company in companies:
+ for account in frappe.get_all("Account", fields=["name", "is_group", "company",
+ "parent_account", "lft", "rgt", "root_type", "report_type", "account_name", "account_number"],
+ filters={"company": company, "root_type": root_type}):
+ if account.account_name not in added_accounts:
+ accounts.append(account)
+ added_accounts.append(account.account_name)
+
+ return accounts
def prepare_data(accounts, start_date, end_date, balance_must_be, companies, company_currency, filters):
data = []
diff --git a/erpnext/loan_management/doctype/loan_interest_accrual/loan_interest_accrual.py b/erpnext/loan_management/doctype/loan_interest_accrual/loan_interest_accrual.py
index 0de073f..1c800a0 100644
--- a/erpnext/loan_management/doctype/loan_interest_accrual/loan_interest_accrual.py
+++ b/erpnext/loan_management/doctype/loan_interest_accrual/loan_interest_accrual.py
@@ -74,39 +74,6 @@
})
)
- if self.payable_principal_amount:
- gle_map.append(
- self.get_gl_dict({
- "account": self.loan_account,
- "party_type": self.applicant_type,
- "party": self.applicant,
- "against": self.interest_income_account,
- "debit": self.payable_principal_amount,
- "debit_in_account_currency": self.interest_amount,
- "against_voucher_type": "Loan",
- "against_voucher": self.loan,
- "remarks": _("Interest accrued from {0} to {1} against loan: {2}").format(
- self.last_accrual_date, self.posting_date, self.loan),
- "cost_center": erpnext.get_default_cost_center(self.company),
- "posting_date": self.posting_date
- })
- )
-
- gle_map.append(
- self.get_gl_dict({
- "account": self.interest_income_account,
- "against": self.loan_account,
- "credit": self.payable_principal_amount,
- "credit_in_account_currency": self.interest_amount,
- "against_voucher_type": "Loan",
- "against_voucher": self.loan,
- "remarks": ("Interest accrued from {0} to {1} against loan: {2}").format(
- self.last_accrual_date, self.posting_date, self.loan),
- "cost_center": erpnext.get_default_cost_center(self.company),
- "posting_date": self.posting_date
- })
- )
-
if gle_map:
make_gl_entries(gle_map, cancel=cancel, adv_adj=adv_adj)
diff --git a/erpnext/regional/report/gstr_1/gstr_1.js b/erpnext/regional/report/gstr_1/gstr_1.js
index 4b98978..9999a6d 100644
--- a/erpnext/regional/report/gstr_1/gstr_1.js
+++ b/erpnext/regional/report/gstr_1/gstr_1.js
@@ -17,7 +17,7 @@
"fieldtype": "Link",
"options": "Address",
"get_query": function () {
- var company = frappe.query_report.get_filter_value('company');
+ let company = frappe.query_report.get_filter_value('company');
if (company) {
return {
"query": 'frappe.contacts.doctype.address.address.address_query',
@@ -27,6 +27,11 @@
}
},
{
+ "fieldname": "company_gstin",
+ "label": __("Company GSTIN"),
+ "fieldtype": "Select"
+ },
+ {
"fieldname": "from_date",
"label": __("From Date"),
"fieldtype": "Date",
@@ -60,10 +65,21 @@
}
],
onload: function (report) {
+ let filters = report.get_values();
+
+ frappe.call({
+ method: 'erpnext.regional.report.gstr_1.gstr_1.get_company_gstins',
+ args: {
+ company: filters.company
+ },
+ callback: function(r) {
+ frappe.query_report.page.fields_dict.company_gstin.df.options = r.message;
+ frappe.query_report.page.fields_dict.company_gstin.refresh();
+ }
+ });
+
report.page.add_inner_button(__("Download as JSON"), function () {
- var filters = report.get_values();
-
frappe.call({
method: 'erpnext.regional.report.gstr_1.gstr_1.get_json',
args: {
diff --git a/erpnext/regional/report/gstr_1/gstr_1.py b/erpnext/regional/report/gstr_1/gstr_1.py
index ce2ffb4..8fcb6bb 100644
--- a/erpnext/regional/report/gstr_1/gstr_1.py
+++ b/erpnext/regional/report/gstr_1/gstr_1.py
@@ -253,7 +253,8 @@
for opts in (("company", " and company=%(company)s"),
("from_date", " and posting_date>=%(from_date)s"),
("to_date", " and posting_date<=%(to_date)s"),
- ("company_address", " and company_address=%(company_address)s")):
+ ("company_address", " and company_address=%(company_address)s"),
+ ("company_gstin", " and company_gstin=%(company_gstin)s")):
if self.filters.get(opts[0]):
conditions += opts[1]
@@ -1192,3 +1193,23 @@
return True
else:
return False
+
+
+@frappe.whitelist()
+def get_company_gstins(company):
+ address = frappe.qb.DocType("Address")
+ links = frappe.qb.DocType("Dynamic Link")
+
+ addresses = frappe.qb.from_(address).inner_join(links).on(
+ address.name == links.parent
+ ).select(
+ address.gstin
+ ).where(
+ links.link_doctype == 'Company'
+ ).where(
+ links.link_name == company
+ ).run(as_dict=1)
+
+ address_list = [''] + [d.gstin for d in addresses]
+
+ return address_list
\ No newline at end of file
diff --git a/erpnext/selling/doctype/sales_order_item/sales_order_item.json b/erpnext/selling/doctype/sales_order_item/sales_order_item.json
index 95f6c4e..080d517 100644
--- a/erpnext/selling/doctype/sales_order_item/sales_order_item.json
+++ b/erpnext/selling/doctype/sales_order_item/sales_order_item.json
@@ -83,8 +83,8 @@
"planned_qty",
"column_break_69",
"work_order_qty",
- "delivered_qty",
"produced_qty",
+ "delivered_qty",
"returned_qty",
"shopping_cart_section",
"additional_notes",
@@ -701,10 +701,8 @@
"width": "50px"
},
{
- "description": "For Production",
"fieldname": "produced_qty",
"fieldtype": "Float",
- "hidden": 1,
"label": "Produced Quantity",
"oldfieldname": "produced_qty",
"oldfieldtype": "Currency",
@@ -802,7 +800,7 @@
"idx": 1,
"istable": 1,
"links": [],
- "modified": "2021-10-05 12:27:25.014789",
+ "modified": "2022-02-21 13:55:08.883104",
"modified_by": "Administrator",
"module": "Selling",
"name": "Sales Order Item",
@@ -811,5 +809,6 @@
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
+ "states": [],
"track_changes": 1
}
\ No newline at end of file
diff --git a/erpnext/selling/page/point_of_sale/pos_payment.js b/erpnext/selling/page/point_of_sale/pos_payment.js
index 9650bc8..4d75e6e 100644
--- a/erpnext/selling/page/point_of_sale/pos_payment.js
+++ b/erpnext/selling/page/point_of_sale/pos_payment.js
@@ -180,14 +180,6 @@
() => frm.save(),
() => this.update_totals_section(frm.doc)
]);
- } else {
- frappe.run_serially([
- () => frm.doc.ignore_pricing_rule=1,
- () => frm.trigger('ignore_pricing_rule'),
- () => frm.doc.ignore_pricing_rule=0,
- () => frm.save(),
- () => this.update_totals_section(frm.doc)
- ]);
}
}
});
diff --git a/erpnext/stock/report/stock_ageing/stock_ageing.py b/erpnext/stock/report/stock_ageing/stock_ageing.py
index a89a403..97a740e 100644
--- a/erpnext/stock/report/stock_ageing/stock_ageing.py
+++ b/erpnext/stock/report/stock_ageing/stock_ageing.py
@@ -12,6 +12,7 @@
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
Filters = frappe._dict
+precision = cint(frappe.db.get_single_value("System Settings", "float_precision"))
def execute(filters: Filters = None) -> Tuple:
to_date = filters["to_date"]
@@ -48,10 +49,13 @@
if filters.get("show_warehouse_wise_stock"):
row.append(details.warehouse)
- row.extend([item_dict.get("total_qty"), average_age,
+ row.extend([
+ flt(item_dict.get("total_qty"), precision),
+ average_age,
range1, range2, range3, above_range3,
earliest_age, latest_age,
- details.stock_uom])
+ details.stock_uom
+ ])
data.append(row)
@@ -79,13 +83,13 @@
qty = flt(item[0]) if not item_dict["has_serial_no"] else 1.0
if age <= filters.range1:
- range1 += qty
+ range1 = flt(range1 + qty, precision)
elif age <= filters.range2:
- range2 += qty
+ range2 = flt(range2 + qty, precision)
elif age <= filters.range3:
- range3 += qty
+ range3 = flt(range3 + qty, precision)
else:
- above_range3 += qty
+ above_range3 = flt(above_range3 + qty, precision)
return range1, range2, range3, above_range3
@@ -286,14 +290,16 @@
def __compute_incoming_stock(self, row: Dict, fifo_queue: List, transfer_key: Tuple, serial_nos: List):
"Update FIFO Queue on inward stock."
- if self.transferred_item_details.get(transfer_key):
+ transfer_data = self.transferred_item_details.get(transfer_key)
+ if transfer_data:
# inward/outward from same voucher, item & warehouse
- slot = self.transferred_item_details[transfer_key].pop(0)
- fifo_queue.append(slot)
+ # eg: Repack with same item, Stock reco for batch item
+ # consume transfer data and add stock to fifo queue
+ self.__adjust_incoming_transfer_qty(transfer_data, fifo_queue, row)
else:
if not serial_nos:
- if fifo_queue and flt(fifo_queue[0][0]) < 0:
- # neutralize negative stock by adding positive stock
+ if fifo_queue and flt(fifo_queue[0][0]) <= 0:
+ # neutralize 0/negative stock by adding positive stock
fifo_queue[0][0] += flt(row.actual_qty)
fifo_queue[0][1] = row.posting_date
else:
@@ -324,7 +330,7 @@
elif not fifo_queue:
# negative stock, no balance but qty yet to consume
fifo_queue.append([-(qty_to_pop), row.posting_date])
- self.transferred_item_details[transfer_key].append([row.actual_qty, row.posting_date])
+ self.transferred_item_details[transfer_key].append([qty_to_pop, row.posting_date])
qty_to_pop = 0
else:
# qty to pop < slot qty, ample balance
@@ -333,6 +339,33 @@
self.transferred_item_details[transfer_key].append([qty_to_pop, slot[1]])
qty_to_pop = 0
+ def __adjust_incoming_transfer_qty(self, transfer_data: Dict, fifo_queue: List, row: Dict):
+ "Add previously removed stock back to FIFO Queue."
+ transfer_qty_to_pop = flt(row.actual_qty)
+
+ def add_to_fifo_queue(slot):
+ if fifo_queue and flt(fifo_queue[0][0]) <= 0:
+ # neutralize 0/negative stock by adding positive stock
+ fifo_queue[0][0] += flt(slot[0])
+ fifo_queue[0][1] = slot[1]
+ else:
+ fifo_queue.append(slot)
+
+ while transfer_qty_to_pop:
+ if transfer_data and 0 < transfer_data[0][0] <= transfer_qty_to_pop:
+ # bucket qty is not enough, consume whole
+ transfer_qty_to_pop -= transfer_data[0][0]
+ add_to_fifo_queue(transfer_data.pop(0))
+ elif not transfer_data:
+ # transfer bucket is empty, extra incoming qty
+ add_to_fifo_queue([transfer_qty_to_pop, row.posting_date])
+ transfer_qty_to_pop = 0
+ else:
+ # ample bucket qty to consume
+ transfer_data[0][0] -= transfer_qty_to_pop
+ add_to_fifo_queue([transfer_qty_to_pop, transfer_data[0][1]])
+ transfer_qty_to_pop = 0
+
def __update_balances(self, row: Dict, key: Union[Tuple, str]):
self.item_details[key]["qty_after_transaction"] = row.qty_after_transaction
diff --git a/erpnext/stock/report/stock_ageing/stock_ageing_fifo_logic.md b/erpnext/stock/report/stock_ageing/stock_ageing_fifo_logic.md
index 9e9bed4..3d759dd 100644
--- a/erpnext/stock/report/stock_ageing/stock_ageing_fifo_logic.md
+++ b/erpnext/stock/report/stock_ageing/stock_ageing_fifo_logic.md
@@ -71,4 +71,39 @@
2nd | -60 | [[-10, 1-12-2021]]
3rd | +5 | [[-5, 3-12-2021]]
4th | +10 | [[5, 4-12-2021]]
-4th | +20 | [[5, 4-12-2021], [20, 4-12-2021]]
\ No newline at end of file
+4th | +20 | [[5, 4-12-2021], [20, 4-12-2021]]
+
+### Concept of Transfer Qty Bucket
+In the case of **Repack**, the quantity that comes in isn't really incoming. It is existing stock repurposed as new stock, because the same voucher has matching incoming and outgoing entries in the same warehouse.
+
+Here, stock is first consumed from the FIFO Queue and then added back to it.
+While adding stock back to the queue we need to know how much to add,
+so we keep track of how much was previously consumed.
+Hence we use a **Transfer Qty Bucket**.
+
+While re-adding stock, we try to re-use the buckets that were consumed earlier (with their dates intact) to maintain correctness; a short sketch of this flow follows the examples below.
+
+#### Case 1: Same Item-Warehouse in Repack
+Eg:
+-------------------------------------------------------------------------------------
+Date | Qty | Voucher | FIFO Queue | Transfer Qty Buckets
+-------------------------------------------------------------------------------------
+1st | +500 | PR | [[500, 1-12-2021]] |
+2nd | -50 | Repack | [[450, 1-12-2021]] | [[50, 1-12-2021]]
+2nd | +50 | Repack | [[450, 1-12-2021], [50, 1-12-2021]] | []
+
+- The balance at the end is restored to 500
+- However, the initial 500 qty bucket is now split into 450 and 50, both with the same date
+- The net effect is the same as before the Repack
+
+#### Case 2: Same Item-Warehouse in Repack with Split Consumption rows
+Eg:
+-------------------------------------------------------------------------------------
+Date | Qty | Voucher | FIFO Queue | Transfer Qty Buckets
+-------------------------------------------------------------------------------------
+1st | +500 | PR | [[500, 1-12-2021]] |
+2nd | -50 | Repack | [[450, 1-12-2021]] | [[50, 1-12-2021]]
+2nd | -50 | Repack | [[400, 1-12-2021]] | [[50, 1-12-2021],
+- | | | |[50, 1-12-2021]]
+2nd | +100 | Repack | [[400, 1-12-2021], [50, 1-12-2021], | []
+- | | | [50, 1-12-2021]] |
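+
+#### Illustrative sketch (not part of the report code)
+The snippet below is a simplified, standalone sketch of the consume/re-add flow described above, using plain lists of `[qty, date]` pairs. It mirrors the behaviour of `__adjust_incoming_transfer_qty`, except that it skips the negative-stock neutralisation step; the helper name is made up for illustration.
+
+```python
+def readd_transfer_buckets(transfer_data, fifo_queue, incoming_qty, posting_date):
+    """Move previously consumed [qty, date] buckets back into the FIFO queue."""
+    qty_to_pop = incoming_qty
+    while qty_to_pop:
+        if transfer_data and 0 < transfer_data[0][0] <= qty_to_pop:
+            # incoming qty covers the whole bucket: consume it, keep its date
+            qty_to_pop -= transfer_data[0][0]
+            fifo_queue.append(transfer_data.pop(0))
+        elif not transfer_data:
+            # no buckets left: the extra incoming qty gets the posting date
+            fifo_queue.append([qty_to_pop, posting_date])
+            qty_to_pop = 0
+        else:
+            # bucket is larger than the incoming qty: split it, keep its date
+            transfer_data[0][0] -= qty_to_pop
+            fifo_queue.append([qty_to_pop, transfer_data[0][1]])
+            qty_to_pop = 0
+
+# Case 1 from above: 500 in, 50 consumed by Repack, then 50 re-added
+fifo_queue = [[450, "1-12-2021"]]
+transfer_data = [[50, "1-12-2021"]]
+readd_transfer_buckets(transfer_data, fifo_queue, 50, "2-12-2021")
+print(fifo_queue)  # [[450, '1-12-2021'], [50, '1-12-2021']] -> balance back to 500
+```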
diff --git a/erpnext/stock/report/stock_ageing/test_stock_ageing.py b/erpnext/stock/report/stock_ageing/test_stock_ageing.py
index 66d2f6b..3fc357e 100644
--- a/erpnext/stock/report/stock_ageing/test_stock_ageing.py
+++ b/erpnext/stock/report/stock_ageing/test_stock_ageing.py
@@ -3,7 +3,7 @@
import frappe
-from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots
+from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots, format_report_data
from erpnext.tests.utils import ERPNextTestCase
@@ -11,7 +11,8 @@
def setUp(self) -> None:
self.filters = frappe._dict(
company="_Test Company",
- to_date="2021-12-10"
+ to_date="2021-12-10",
+ range1=30, range2=60, range3=90
)
def test_normal_inward_outward_queue(self):
@@ -236,6 +237,371 @@
item_wh_balances = [item_wh_wise_slots.get(i).get("qty_after_transaction") for i in item_wh_wise_slots]
self.assertEqual(sum(item_wh_balances), item_result["qty_after_transaction"])
+ def test_repack_entry_same_item_split_rows(self):
+ """
+ Split consumption rows and have single repacked item row (same warehouse).
+ Ledger:
+ Item | Qty | Voucher
+ ------------------------
+ Item 1 | 500 | 001
+ Item 1 | -50 | 002 (repack)
+ Item 1 | -50 | 002 (repack)
+ Item 1 | 100 | 002 (repack)
+
+ Case most likely for batch items. Test time bucket computation.
+ """
+ sle = [
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=500, qty_after_transaction=500,
+ warehouse="WH 1",
+ posting_date="2021-12-03", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=450,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=400,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=100, qty_after_transaction=500,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+ slots = FIFOSlots(self.filters, sle).generate()
+ item_result = slots["Flask Item"]
+ queue = item_result["fifo_queue"]
+
+ self.assertEqual(item_result["total_qty"], 500.0)
+ self.assertEqual(queue[0][0], 400.0)
+ self.assertEqual(queue[1][0], 50.0)
+ self.assertEqual(queue[2][0], 50.0)
+ # check if time buckets add up to balance qty
+ self.assertEqual(sum([i[0] for i in queue]), 500.0)
+
+ def test_repack_entry_same_item_overconsume(self):
+ """
+ Over consume item and have less repacked item qty (same warehouse).
+ Ledger:
+ Item | Qty | Voucher
+ ------------------------
+ Item 1 | 500 | 001
+ Item 1 | -100 | 002 (repack)
+ Item 1 | 50 | 002 (repack)
+
+ Case most likely for batch items. Test time bucket computation.
+ """
+ sle = [
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=500, qty_after_transaction=500,
+ warehouse="WH 1",
+ posting_date="2021-12-03", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-100), qty_after_transaction=400,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=50, qty_after_transaction=450,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+ slots = FIFOSlots(self.filters, sle).generate()
+ item_result = slots["Flask Item"]
+ queue = item_result["fifo_queue"]
+
+ self.assertEqual(item_result["total_qty"], 450.0)
+ self.assertEqual(queue[0][0], 400.0)
+ self.assertEqual(queue[1][0], 50.0)
+ # check if time buckets add up to balance qty
+ self.assertEqual(sum([i[0] for i in queue]), 450.0)
+
+ def test_repack_entry_same_item_overconsume_with_split_rows(self):
+ """
+ Over consume item and have less repacked item qty (same warehouse).
+ Ledger:
+ Item | Qty | Voucher
+ ------------------------
+ Item 1 | 20 | 001
+ Item 1 | -50 | 002 (repack)
+ Item 1 | -50 | 002 (repack)
+ Item 1 | 50 | 002 (repack)
+ """
+ sle = [
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=20, qty_after_transaction=20,
+ warehouse="WH 1",
+ posting_date="2021-12-03", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=(-30),
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=(-80),
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=50, qty_after_transaction=(-30),
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+ fifo_slots = FIFOSlots(self.filters, sle)
+ slots = fifo_slots.generate()
+ item_result = slots["Flask Item"]
+ queue = item_result["fifo_queue"]
+
+ self.assertEqual(item_result["total_qty"], -30.0)
+ self.assertEqual(queue[0][0], -30.0)
+
+ # check transfer bucket
+ transfer_bucket = fifo_slots.transferred_item_details[('002', 'Flask Item', 'WH 1')]
+ self.assertEqual(transfer_bucket[0][0], 50)
+
+ def test_repack_entry_same_item_overproduce(self):
+ """
+ Under consume item and have more repacked item qty (same warehouse).
+ Ledger:
+ Item | Qty | Voucher
+ ------------------------
+ Item 1 | 500 | 001
+ Item 1 | -50 | 002 (repack)
+ Item 1 | 100 | 002 (repack)
+
+ Case most likely for batch items. Test time bucket computation.
+ """
+ sle = [
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=500, qty_after_transaction=500,
+ warehouse="WH 1",
+ posting_date="2021-12-03", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=450,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=100, qty_after_transaction=550,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+ slots = FIFOSlots(self.filters, sle).generate()
+ item_result = slots["Flask Item"]
+ queue = item_result["fifo_queue"]
+
+ self.assertEqual(item_result["total_qty"], 550.0)
+ self.assertEqual(queue[0][0], 450.0)
+ self.assertEqual(queue[1][0], 50.0)
+ self.assertEqual(queue[2][0], 50.0)
+ # check if time buckets add up to balance qty
+ self.assertEqual(sum([i[0] for i in queue]), 550.0)
+
+ def test_repack_entry_same_item_overproduce_with_split_rows(self):
+ """
+ Under consume item and have more repacked item qty, split across rows (same warehouse).
+ Ledger:
+ Item | Qty | Voucher
+ ------------------------
+ Item 1 | 20 | 001
+ Item 1 | -50 | 002 (repack)
+ Item 1 | 50 | 002 (repack)
+ Item 1 | 50 | 002 (repack)
+ """
+ sle = [
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=20, qty_after_transaction=20,
+ warehouse="WH 1",
+ posting_date="2021-12-03", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=(-30),
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=50, qty_after_transaction=20,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict(
+ name="Flask Item",
+ actual_qty=50, qty_after_transaction=70,
+ warehouse="WH 1",
+ posting_date="2021-12-04", voucher_type="Stock Entry",
+ voucher_no="002",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+ fifo_slots = FIFOSlots(self.filters, sle)
+ slots = fifo_slots.generate()
+ item_result = slots["Flask Item"]
+ queue = item_result["fifo_queue"]
+
+ self.assertEqual(item_result["total_qty"], 70.0)
+ self.assertEqual(queue[0][0], 20.0)
+ self.assertEqual(queue[1][0], 50.0)
+
+ # check transfer bucket
+ transfer_bucket = fifo_slots.transferred_item_details[('002', 'Flask Item', 'WH 1')]
+ self.assertFalse(transfer_bucket)
+
+ def test_negative_stock_same_voucher(self):
+ """
+ Test negative stock scenario in transfer bucket via repack entry (same wh).
+ Ledger:
+ Item | Qty | Voucher
+ ------------------------
+ Item 1 | -50 | 001
+ Item 1 | -50 | 001
+ Item 1 | 30 | 001
+ Item 1 | 80 | 001
+ """
+ sle = [
+ frappe._dict( # consume item, driving stock negative
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=(-50),
+ warehouse="WH 1",
+ posting_date="2021-12-01", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict( # consume further into negative stock
+ name="Flask Item",
+ actual_qty=(-50), qty_after_transaction=(-100),
+ warehouse="WH 1",
+ posting_date="2021-12-01", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=30, qty_after_transaction=(-70),
+ warehouse="WH 1",
+ posting_date="2021-12-01", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+ fifo_slots = FIFOSlots(self.filters, sle)
+ slots = fifo_slots.generate()
+ item_result = slots["Flask Item"]
+
+ # check transfer bucket
+ transfer_bucket = fifo_slots.transferred_item_details[('001', 'Flask Item', 'WH 1')]
+ self.assertEqual(transfer_bucket[0][0], 20)
+ self.assertEqual(transfer_bucket[1][0], 50)
+ self.assertEqual(item_result["fifo_queue"][0][0], -70.0)
+
+ sle.append(frappe._dict(
+ name="Flask Item",
+ actual_qty=80, qty_after_transaction=10,
+ warehouse="WH 1",
+ posting_date="2021-12-01", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ))
+
+ fifo_slots = FIFOSlots(self.filters, sle)
+ slots = fifo_slots.generate()
+ item_result = slots["Flask Item"]
+
+ transfer_bucket = fifo_slots.transferred_item_details[('001', 'Flask Item', 'WH 1')]
+ self.assertFalse(transfer_bucket)
+ self.assertEqual(item_result["fifo_queue"][0][0], 10.0)
+
+ def test_precision(self):
+ "Test if final balance qty is rounded off correctly."
+ sle = [
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=0.3, qty_after_transaction=0.3,
+ warehouse="WH 1",
+ posting_date="2021-12-01", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ frappe._dict( # stock up item
+ name="Flask Item",
+ actual_qty=0.6, qty_after_transaction=0.9,
+ warehouse="WH 1",
+ posting_date="2021-12-01", voucher_type="Stock Entry",
+ voucher_no="001",
+ has_serial_no=False, serial_no=None
+ ),
+ ]
+
+ slots = FIFOSlots(self.filters, sle).generate()
+ report_data = format_report_data(self.filters, slots, self.filters["to_date"])
+ row = report_data[0] # first row in report
+ bal_qty = row[5]
+ range_qty_sum = sum([i for i in row[7:11]]) # get sum of range balance
+
+ # check if value of Available Qty column matches with range bucket post format
+ self.assertEqual(bal_qty, 0.9)
+ self.assertEqual(bal_qty, range_qty_sum)
+
def generate_item_and_item_wh_wise_slots(filters, sle):
"Return results with and without 'show_warehouse_wise_stock'"
item_wise_slots = FIFOSlots(filters, sle).generate()
diff --git a/erpnext/tests/test_zform_loads.py b/erpnext/tests/test_zform_loads.py
new file mode 100644
index 0000000..b6fb636
--- /dev/null
+++ b/erpnext/tests/test_zform_loads.py
@@ -0,0 +1,30 @@
+""" dumb test to check all function calls on known form loads """
+
+import unittest
+
+import frappe
+from frappe.desk.form.load import getdoc
+
+
+class TestFormLoads(unittest.TestCase):
+
+ def test_load(self):
+ erpnext_modules = frappe.get_all("Module Def", filters={"app_name": "erpnext"}, pluck="name")
+ doctypes = frappe.get_all("DocType", {"istable": 0, "issingle": 0, "is_virtual": 0, "module": ("in", erpnext_modules)}, pluck="name")
+
+ for doctype in doctypes:
+ last_doc = frappe.db.get_value(doctype, {}, "name", order_by="modified desc")
+ if not last_doc:
+ continue
+ with self.subTest(msg=f"Loading {doctype} - {last_doc}", doctype=doctype, last_doc=last_doc):
+ try:
+ # reset previous response
+ frappe.response = frappe._dict({"docs":[]})
+ frappe.response.docinfo = None
+
+ getdoc(doctype, last_doc)
+ except Exception as e:
+ self.fail(f"Failed to load {doctype} - {last_doc}: {e}")
+
+ self.assertTrue(frappe.response.docs, msg=f"expected document in response, found: {frappe.response.docs}")
+ self.assertTrue(frappe.response.docinfo, msg=f"expected docinfo in response, found: {frappe.response.docinfo}")
diff --git a/erpnext/translations/de.csv b/erpnext/translations/de.csv
index cf73564..f345a87 100644
--- a/erpnext/translations/de.csv
+++ b/erpnext/translations/de.csv
@@ -1597,6 +1597,7 @@
Middle Income,Mittleres Einkommen,
Middle Name,Zweiter Vorname,
Middle Name (Optional),Weiterer Vorname (optional),
+Milestonde,Meilenstein,
Min Amt can not be greater than Max Amt,Min. Amt kann nicht größer als Max. Amt sein,
Min Qty can not be greater than Max Qty,Mindestmenge kann nicht größer als Maximalmenge sein,
Minimum Lead Age (Days),Mindest Lead-Alter (in Tagen),