fix: reposting of old data consuming excessive server disk space
diff --git a/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.js b/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.js
index 8aec532..40748ce 100644
--- a/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.js
+++ b/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.js
@@ -59,6 +59,7 @@
 			if (frm.doc.status == 'In Progress') {
 				frm.doc.current_index = data.current_index;
 				frm.doc.items_to_be_repost = data.items_to_be_repost;
+				frm.doc.total_reposting_count = data.total_reposting_count;
 
 				frm.dashboard.reset();
 				frm.trigger('show_reposting_progress');
@@ -95,6 +96,11 @@
 		var bars = [];
 
 		let total_count = frm.doc.items_to_be_repost ? JSON.parse(frm.doc.items_to_be_repost).length : 0;
+
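+		// Prefer the persisted total count; older documents only have items_to_be_repost.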
+		if (frm.doc?.total_reposting_count) {
+			total_count = frm.doc.total_reposting_count;
+		}
+
 		let progress = flt(cint(frm.doc.current_index) / total_count * 100, 2) || 0.5;
 		var title = __('Reposting Completed {0}%', [progress]);
 
diff --git a/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.json b/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.json
index 8a5309c..1c5b521 100644
--- a/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.json
+++ b/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.json
@@ -22,11 +22,15 @@
   "amended_from",
   "error_section",
   "error_log",
+  "reposting_info_section",
+  "reposting_data_file",
   "items_to_be_repost",
-  "affected_transactions",
   "distinct_item_and_warehouse",
+  "column_break_o1sj",
+  "total_reposting_count",
   "current_index",
-  "gl_reposting_index"
+  "gl_reposting_index",
+  "affected_transactions"
  ],
  "fields": [
   {
@@ -191,13 +195,36 @@
    "fieldtype": "Int",
    "hidden": 1,
    "label": "GL reposting index",
+   "no_copy": 1,
+   "read_only": 1
+  },
+  {
+   "fieldname": "reposting_info_section",
+   "fieldtype": "Section Break",
+   "label": "Reposting Info"
+  },
+  {
+   "fieldname": "column_break_o1sj",
+   "fieldtype": "Column Break"
+  },
+  {
+   "fieldname": "total_reposting_count",
+   "fieldtype": "Int",
+   "label": "Total Reposting Count",
+   "no_copy": 1,
+   "read_only": 1
+  },
+  {
+   "fieldname": "reposting_data_file",
+   "fieldtype": "Attach",
+   "label": "Reposting Data File",
    "read_only": 1
   }
  ],
  "index_web_pages_for_search": 1,
  "is_submittable": 1,
  "links": [],
- "modified": "2022-11-28 16:00:05.637440",
+ "modified": "2023-05-31 12:48:57.138693",
  "modified_by": "Administrator",
  "module": "Stock",
  "name": "Repost Item Valuation",
diff --git a/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.py b/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.py
index d3bcab7..d5fc710 100644
--- a/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.py
+++ b/erpnext/stock/doctype/repost_item_valuation/repost_item_valuation.py
@@ -3,6 +3,7 @@
 
 import frappe
 from frappe import _
+from frappe.desk.form.load import get_attachments
 from frappe.exceptions import QueryDeadlockError, QueryTimeoutError
 from frappe.model.document import Document
 from frappe.query_builder import DocType, Interval
@@ -95,6 +96,12 @@
 
 		self.allow_negative_stock = 1
 
+	def on_cancel(self):
+		self.clear_attachment()
+
+	def on_trash(self):
+		self.clear_attachment()
+
 	def set_company(self):
 		if self.based_on == "Transaction":
 			self.company = frappe.get_cached_value(self.voucher_type, self.voucher_no, "company")
@@ -110,6 +117,14 @@
 		if write:
 			self.db_set("status", self.status)
 
+	def clear_attachment(self):
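+		"""Delete the attached reposting data file, if any, and clear the field."""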
+		if attachments := get_attachments(self.doctype, self.name):
+			attachment = attachments[0]
+			frappe.delete_doc("File", attachment.name)
+
+		if self.reposting_data_file:
+			self.db_set("reposting_data_file", None)
+
 	def on_submit(self):
 		"""During tests reposts are executed immediately.
 
diff --git a/erpnext/stock/stock_ledger.py b/erpnext/stock/stock_ledger.py
index 6106809..12b9641 100644
--- a/erpnext/stock/stock_ledger.py
+++ b/erpnext/stock/stock_ledger.py
@@ -6,10 +6,21 @@
 from typing import Optional, Set, Tuple
 
 import frappe
-from frappe import _
+from frappe import _, scrub
 from frappe.model.meta import get_field_precision
 from frappe.query_builder.functions import CombineDatetime, Sum
-from frappe.utils import cint, cstr, flt, get_link_to_form, getdate, now, nowdate
+from frappe.utils import (
+	cint,
+	cstr,
+	flt,
+	get_link_to_form,
+	getdate,
+	gzip_compress,
+	gzip_decompress,
+	now,
+	nowdate,
+	parse_json,
+)
 
 import erpnext
 from erpnext.stock.doctype.bin.bin import update_qty as update_bin_qty
@@ -211,14 +222,18 @@
 	if not args:
 		args = []  # set args to empty list if None to avoid enumerate error
 
+	reposting_data = {}
+	if doc and doc.reposting_data_file:
+		reposting_data = get_reposting_data(doc.reposting_data_file)
+
 	items_to_be_repost = get_items_to_be_repost(
-		voucher_type=voucher_type, voucher_no=voucher_no, doc=doc
+		voucher_type=voucher_type, voucher_no=voucher_no, doc=doc, reposting_data=reposting_data
 	)
 	if items_to_be_repost:
 		args = items_to_be_repost
 
-	distinct_item_warehouses = get_distinct_item_warehouse(args, doc)
-	affected_transactions = get_affected_transactions(doc)
+	distinct_item_warehouses = get_distinct_item_warehouse(args, doc, reposting_data=reposting_data)
+	affected_transactions = get_affected_transactions(doc, reposting_data=reposting_data)
 
 	i = get_current_index(doc) or 0
 	while i < len(args):
@@ -261,6 +276,28 @@
 			)
 
 
+def get_reposting_data(file_path) -> dict:
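+	"""Return the reposting state stored in the gzipped JSON file at ``file_path``."""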
+	file_name = frappe.db.get_value(
+		"File",
+		{
+			"file_url": file_path,
+			"attached_to_field": "reposting_data_file",
+		},
+		"name",
+	)
+
+	if not file_name:
+		return frappe._dict()
+
+	attached_file = frappe.get_doc("File", file_name)
+
+	data = gzip_decompress(attached_file.get_content())
+	data = json.loads(data.decode("utf-8"))
+
+	return parse_json(data)
+
+
 def validate_item_warehouse(args):
 	for field in ["item_code", "warehouse", "posting_date", "posting_time"]:
 		if args.get(field) in [None, ""]:
@@ -271,28 +308,107 @@
 def update_args_in_repost_item_valuation(
 	doc, index, args, distinct_item_warehouses, affected_transactions
 ):
-	doc.db_set(
-		{
-			"items_to_be_repost": json.dumps(args, default=str),
-			"distinct_item_and_warehouse": json.dumps(
-				{str(k): v for k, v in distinct_item_warehouses.items()}, default=str
-			),
-			"current_index": index,
-			"affected_transactions": frappe.as_json(affected_transactions),
-		}
-	)
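+	# Newer documents keep the bulky reposting state in a gzipped JSON attachment;
+	# documents that already populated items_to_be_repost keep using the DB fields.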
+	if not doc.items_to_be_repost:
+		file_name = ""
+		if doc.reposting_data_file:
+			file_name = get_reposting_file_name(doc.doctype, doc.name)
+
+		doc.reposting_data_file = create_json_gz_file(
+			{
+				"items_to_be_repost": args,
+				"distinct_item_and_warehouse": {str(k): v for k, v in distinct_item_warehouses.items()},
+				"affected_transactions": affected_transactions,
+			},
+			doc,
+			file_name,
+		)
+
+		doc.db_set(
+			{
+				"current_index": index,
+				"total_reposting_count": len(args),
+				"reposting_data_file": doc.reposting_data_file,
+			}
+		)
+
+	else:
+		doc.db_set(
+			{
+				"items_to_be_repost": json.dumps(args, default=str),
+				"distinct_item_and_warehouse": json.dumps(
+					{str(k): v for k, v in distinct_item_warehouses.items()}, default=str
+				),
+				"current_index": index,
+				"affected_transactions": frappe.as_json(affected_transactions),
+			}
+		)
 
 	if not frappe.flags.in_test:
 		frappe.db.commit()
 
 	frappe.publish_realtime(
 		"item_reposting_progress",
-		{"name": doc.name, "items_to_be_repost": json.dumps(args, default=str), "current_index": index},
+		{
+			"name": doc.name,
+			"items_to_be_repost": json.dumps(args, default=str),
+			"current_index": index,
+			"total_reposting_count": len(args),
+		},
 	)
 
 
-def get_items_to_be_repost(voucher_type=None, voucher_no=None, doc=None):
+def get_reposting_file_name(dt, dn):
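+	"""Return the name of the File attached to the document via reposting_data_file."""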
+	return frappe.db.get_value(
+		"File",
+		{
+			"attached_to_doctype": dt,
+			"attached_to_name": dn,
+			"attached_to_field": "reposting_data_file",
+		},
+		"name",
+	)
+
+
+def create_json_gz_file(data, doc, file_name=None) -> str:
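+	"""Store ``data`` as gzipped JSON: attach a new private file to ``doc`` or,
+	if ``file_name`` is given, overwrite that existing file. Returns the file URL."""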
+	encoded_content = frappe.safe_encode(frappe.as_json(data))
+	compressed_content = gzip_compress(encoded_content)
+
+	if not file_name:
+		json_filename = f"{scrub(doc.doctype)}-{scrub(doc.name)}.json.gz"
+		_file = frappe.get_doc(
+			{
+				"doctype": "File",
+				"file_name": json_filename,
+				"attached_to_doctype": doc.doctype,
+				"attached_to_name": doc.name,
+				"attached_to_field": "reposting_data_file",
+				"content": compressed_content,
+				"is_private": 1,
+			}
+		)
+		_file.save(ignore_permissions=True)
+
+		return _file.file_url
+	else:
+		file_doc = frappe.get_doc("File", file_name)
+		path = file_doc.get_full_path()
+
+		with open(path, "wb") as f:
+			f.write(compressed_content)
+
+		return doc.reposting_data_file
+
+
+def get_items_to_be_repost(voucher_type=None, voucher_no=None, doc=None, reposting_data=None):
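+	# Prefer the state stored in the attached gzip file; fall back to the legacy DB field.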
+	if not reposting_data and doc and doc.reposting_data_file:
+		reposting_data = get_reposting_data(doc.reposting_data_file)
+
+	if reposting_data and reposting_data.items_to_be_repost:
+		return reposting_data.items_to_be_repost
+
 	items_to_be_repost = []
+
 	if doc and doc.items_to_be_repost:
 		items_to_be_repost = json.loads(doc.items_to_be_repost) or []
 
@@ -308,8 +424,15 @@
 	return items_to_be_repost or []
 
 
-def get_distinct_item_warehouse(args=None, doc=None):
+def get_distinct_item_warehouse(args=None, doc=None, reposting_data=None):
+	if not reposting_data and doc and doc.reposting_data_file:
+		reposting_data = get_reposting_data(doc.reposting_data_file)
+
+	if reposting_data and reposting_data.distinct_item_and_warehouse:
+		return reposting_data.distinct_item_and_warehouse
+
 	distinct_item_warehouses = {}
+
 	if doc and doc.distinct_item_and_warehouse:
 		distinct_item_warehouses = json.loads(doc.distinct_item_and_warehouse)
 		distinct_item_warehouses = {
@@ -324,7 +447,13 @@
 	return distinct_item_warehouses
 
 
-def get_affected_transactions(doc) -> Set[Tuple[str, str]]:
+def get_affected_transactions(doc, reposting_data=None) -> Set[Tuple[str, str]]:
+	if not reposting_data and doc and doc.reposting_data_file:
+		reposting_data = get_reposting_data(doc.reposting_data_file)
+
+	if reposting_data and reposting_data.affected_transactions:
+		return {tuple(transaction) for transaction in reposting_data.affected_transactions}
+
 	if not doc.affected_transactions:
 		return set()