Merge pull request #21491 from gavindsouza/tally-migration-feat

feat(Tally migration): Errored documents handling
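
Failed inserts are now captured on the Tally Migration document itself, in the hidden JSON fields failed_import_log and fixed_errors_log, and rendered as HTML previews on the form where each row can be resolved, marked unresolved again, or turned into a new document. A rough sketch of the entry shape the server-side log() method stores (the helper name below is hypothetical, for illustration only):

    import json
    import traceback

    def record_failed_import(migration, failed_doc):
        # Sketch of what TallyMigration.log() does for Document payloads:
        # append the offending document and its traceback to the JSON log field.
        log = json.loads(migration.failed_import_log or "[]")
        log.append({
            "doc": failed_doc.as_dict(),    # the document that could not be inserted
            "exc": traceback.format_exc(),  # traceback rendered in the form preview
        })
        migration.failed_import_log = json.dumps(log, separators=(",", ":"))

On the client, resolve/unresolve simply move an entry between the two JSON logs and save the form, so fixed documents can be retried without re-running the whole import.
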
diff --git a/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.js b/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.js
index d84c823..fd16d1e 100644
--- a/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.js
+++ b/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.js
@@ -1,7 +1,9 @@
 // Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
 // For license information, please see license.txt
 
-frappe.ui.form.on('Tally Migration', {
+frappe.provide("erpnext.tally_migration");
+
+frappe.ui.form.on("Tally Migration", {
 	onload: function (frm) {
 		let reload_status = true;
 		frappe.realtime.on("tally_migration_progress_update", function (data) {
@@ -35,7 +37,17 @@
 			}
 		});
 	},
+
 	refresh: function (frm) {
+		frm.trigger("show_logs_preview");
+		erpnext.tally_migration.failed_import_log = JSON.parse(frm.doc.failed_import_log);
+		erpnext.tally_migration.fixed_errors_log = JSON.parse(frm.doc.fixed_errors_log);
+
+		["default_round_off_account", "default_warehouse", "default_cost_center"].forEach(account => {
+			frm.toggle_reqd(account, frm.doc.is_master_data_imported === 1)
+			frm.toggle_enable(account, frm.doc.is_day_book_data_processed != 1)
+		})
+
 		if (frm.doc.master_data && !frm.doc.is_master_data_imported) {
 			if (frm.doc.is_master_data_processed) {
 				if (frm.doc.status != "Importing Master Data") {
@@ -47,6 +59,7 @@
 				}
 			}
 		}
+
 		if (frm.doc.day_book_data && !frm.doc.is_day_book_data_imported) {
 			if (frm.doc.is_day_book_data_processed) {
 				if (frm.doc.status != "Importing Day Book Data") {
@@ -59,6 +72,17 @@
 			}
 		}
 	},
+
+	erpnext_company: function (frm) {
+		frappe.db.exists("Company", frm.doc.erpnext_company).then(exists => {
+			if (exists) {
+				frappe.msgprint(
+					__("Company {0} already exists. Continuing will overwrite the Company and Chart of Accounts", [frm.doc.erpnext_company]),
+				);
+			}
+		});
+	},
+
 	add_button: function (frm, label, method) {
 		frm.add_custom_button(
 			label,
@@ -71,5 +95,255 @@
 				frm.reload_doc();
 			}
 		);
+	},
+
+	render_html_table(frm, shown_logs, hidden_logs, field) {
+		if (shown_logs && shown_logs.length > 0) {
+			frm.toggle_display(field, true);
+		} else {
+			frm.toggle_display(field, false);
+			return
+		}
+		let rows = erpnext.tally_migration.get_html_rows(shown_logs, field);
+		let rows_head, table_caption;
+
+		let table_footer = (hidden_logs && (hidden_logs.length > 0)) ? `<tr class="text-muted">
+				<td colspan="4">And ${hidden_logs.length} more</td>
+			</tr>`: "";
+
+		if (field === "fixed_error_log_preview") {
+			rows_head = `<th width="75%">${__("Meta Data")}</th>
+			<th width="10%">${__("Unresolve")}</th>`
+			table_caption = "Resolved Issues"
+		} else {
+			rows_head = `<th width="75%">${__("Error Message")}</th>
+			<th width="10%">${__("Create")}</th>`
+			table_caption = "Error Log"
+		}
+
+		frm.get_field(field).$wrapper.html(`
+			<table class="table table-bordered">
+				<caption>${table_caption}</caption>
+				<tr class="text-muted">
+					<th width="5%">${__("#")}</th>
+					<th width="10%">${__("DocType")}</th>
+					${rows_head}
+				</tr>
+				${rows}
+				${table_footer}
+			</table>
+		`);
+	},
+
+	show_error_summary(frm) {
+		let summary = erpnext.tally_migration.failed_import_log.reduce((summary, row) => {
+			if (row.doc) {
+				if (summary[row.doc.doctype]) {
+					summary[row.doc.doctype] += 1;
+				} else {
+					summary[row.doc.doctype] = 1;
+				}
+			}
+			return summary
+		}, {});
+		console.table(summary);
+	},
+
+	show_logs_preview(frm) {
+		let empty = "[]";
+		let import_log = frm.doc.failed_import_log || empty;
+		let completed_log = frm.doc.fixed_errors_log || empty;
+		let render_section = !(import_log === completed_log && import_log === empty);
+
+		frm.toggle_display("import_log_section", render_section);
+		if (render_section) {
+			frm.trigger("show_error_summary");
+			frm.trigger("show_errored_import_log");
+			frm.trigger("show_fixed_errors_log");
+		}
+	},
+
+	show_errored_import_log(frm) {
+		let import_log = erpnext.tally_migration.failed_import_log;
+		let logs = import_log.slice(0, 20);
+		let hidden_logs = import_log.slice(20);
+
+		frm.events.render_html_table(frm, logs, hidden_logs, "failed_import_preview");
+	},
+
+	show_fixed_errors_log(frm) {
+		let completed_log = erpnext.tally_migration.fixed_errors_log;
+		let logs = completed_log.slice(0, 20);
+		let hidden_logs = completed_log.slice(20);
+
+		frm.events.render_html_table(frm, logs, hidden_logs, "fixed_error_log_preview");
 	}
 });
+
+erpnext.tally_migration.getError = (traceback) => {
+	/* Extracts the error message from the Python traceback or a single-line note such as "Solved on ..." */
+	let is_multiline = traceback.trim().indexOf("\n") != -1;
+	let message;
+
+	if (is_multiline) {
+		let exc_error_idx = traceback.trim().lastIndexOf("\n") + 1
+		let error_line = traceback.substr(exc_error_idx)
+		let split_str_idx = (error_line.indexOf(':') > 0) ? error_line.indexOf(':') + 1 : 0;
+		message = error_line.slice(split_str_idx).trim();
+	} else {
+		message = traceback;
+	}
+
+	return message
+}
+
+erpnext.tally_migration.cleanDoc = (obj) => {
+	/* Recursively strips all null and empty-string values from the given object */
+	let temp = obj;
+	$.each(temp, function(key, value){
+		if (value === "" || value === null){
+			delete obj[key];
+		} else if (Object.prototype.toString.call(value) === '[object Object]') {
+			erpnext.tally_migration.cleanDoc(value);
+		} else if ($.isArray(value)) {
+			$.each(value, function (k,v) { erpnext.tally_migration.cleanDoc(v); });
+		}
+	});
+	return temp;
+}
+
+erpnext.tally_migration.unresolve = (document) => {
+	/* Mark the document as unresolved, i.e. move it back to the failed import log */
+	let frm = cur_frm;
+	let failed_log = erpnext.tally_migration.failed_import_log;
+	let fixed_log = erpnext.tally_migration.fixed_errors_log;
+
+	let modified_fixed_log = fixed_log.filter(row => {
+		if (!frappe.utils.deep_equal(erpnext.tally_migration.cleanDoc(row.doc), document)) {
+			return row
+		}
+	});
+
+	failed_log.push({ doc: document, exc: `Marked unresolved on ${Date()}` });
+
+	frm.doc.failed_import_log = JSON.stringify(failed_log);
+	frm.doc.fixed_errors_log = JSON.stringify(modified_fixed_log);
+
+	frm.dirty();
+	frm.save();
+}
+
+erpnext.tally_migration.resolve = (document) => {
+	/* Mark the document as resolved, i.e. move it to the fixed errors log */
+	let frm = cur_frm;
+	let failed_log = erpnext.tally_migration.failed_import_log;
+	let fixed_log = erpnext.tally_migration.fixed_errors_log;
+
+	let modified_failed_log = failed_log.filter(row => {
+		if (!frappe.utils.deep_equal(erpnext.tally_migration.cleanDoc(row.doc), document)) {
+			return row
+		}
+	});
+	fixed_log.push({ doc: document, exc: `Solved on ${Date()}` });
+
+	frm.doc.failed_import_log = JSON.stringify(modified_failed_log);
+	frm.doc.fixed_errors_log = JSON.stringify(fixed_log);
+
+	frm.dirty();
+	frm.save();
+}
+
+erpnext.tally_migration.create_new_doc = (document) => {
+	/* Mark as resolved and create new document */
+	erpnext.tally_migration.resolve(document);
+	return frappe.call({
+		type: "POST",
+		method: 'erpnext.erpnext_integrations.doctype.tally_migration.tally_migration.new_doc',
+		args: {
+			document
+		},
+		freeze: true,
+		callback: function(r) {
+			if(!r.exc) {
+				frappe.model.sync(r.message);
+				frappe.get_doc(r.message.doctype, r.message.name).__run_link_triggers = true;
+				frappe.set_route("Form", r.message.doctype, r.message.name);
+			}
+		}
+	});
+}
+
+erpnext.tally_migration.get_html_rows = (logs, field) => {
+	let index = 0;
+	let rows = logs
+		.map(({ doc, exc }) => {
+			let id = frappe.dom.get_unique_id();
+			let traceback = exc;
+
+			let error_message = erpnext.tally_migration.getError(traceback);
+			index++;
+
+			let show_traceback = `
+				<button class="btn btn-default btn-xs m-3" type="button" data-toggle="collapse" data-target="#${id}-traceback" aria-expanded="false" aria-controls="${id}-traceback">
+					${__("Show Traceback")}
+				</button>
+				<div class="collapse margin-top" id="${id}-traceback">
+					<div class="well">
+						<pre style="font-size: smaller;">${traceback}</pre>
+					</div>
+				</div>`;
+
+			let show_doc = `
+				<button class='btn btn-default btn-xs m-3' type='button' data-toggle='collapse' data-target='#${id}-doc' aria-expanded='false' aria-controls='${id}-doc'>
+					${__("Show Document")}
+				</button>
+				<div class="collapse margin-top" id="${id}-doc">
+					<div class="well">
+						<pre style="font-size: smaller;">${JSON.stringify(erpnext.tally_migration.cleanDoc(doc), null, 1)}</pre>
+					</div>
+				</div>`;
+
+			let create_button = `
+				<button class='btn btn-default btn-xs m-3' type='button' onclick='erpnext.tally_migration.create_new_doc(${JSON.stringify(doc)})'>
+					${__("Create Document")}
+				</button>`
+
+			let mark_as_unresolved = `
+				<button class='btn btn-default btn-xs m-3' type='button' onclick='erpnext.tally_migration.unresolve(${JSON.stringify(doc)})'>
+					${__("Mark as unresolved")}
+				</button>`
+
+			if (field === "fixed_error_log_preview") {
+				return `<tr>
+							<td>${index}</td>
+							<td>
+								<div>${doc.doctype}</div>
+							</td>
+							<td>
+								<div>${error_message}</div>
+								<div>${show_doc}</div>
+							</td>
+							<td>
+								<div>${mark_as_unresolved}</div>
+							</td>
+						</tr>`;
+			} else {
+				return `<tr>
+							<td>${index}</td>
+							<td>
+								<div>${doc.doctype}</div>
+							</td>
+							<td>
+								<div>${error_message}</div>
+								<div>${show_traceback}</div>
+								<div>${show_doc}</div>
+							</td>
+							<td>
+								<div>${create_button}</div>
+							</td>
+						</tr>`;
+			}
+		}).join("");
+
+	return rows
+}
\ No newline at end of file
diff --git a/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.json b/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.json
index dc6f093..417d943 100644
--- a/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.json
+++ b/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.json
@@ -28,14 +28,19 @@
   "vouchers",
   "accounts_section",
   "default_warehouse",
-  "round_off_account",
+  "default_round_off_account",
   "column_break_21",
   "default_cost_center",
   "day_book_section",
   "day_book_data",
   "column_break_27",
   "is_day_book_data_processed",
-  "is_day_book_data_imported"
+  "is_day_book_data_imported",
+  "import_log_section",
+  "failed_import_log",
+  "fixed_errors_log",
+  "failed_import_preview",
+  "fixed_error_log_preview"
  ],
  "fields": [
   {
@@ -57,6 +62,7 @@
    "fieldname": "tally_creditors_account",
    "fieldtype": "Data",
    "label": "Tally Creditors Account",
+   "read_only_depends_on": "eval:doc.is_master_data_processed==1",
    "reqd": 1
   },
   {
@@ -69,6 +75,7 @@
    "fieldname": "tally_debtors_account",
    "fieldtype": "Data",
    "label": "Tally Debtors Account",
+   "read_only_depends_on": "eval:doc.is_master_data_processed==1",
    "reqd": 1
   },
   {
@@ -92,7 +99,7 @@
    "fieldname": "erpnext_company",
    "fieldtype": "Data",
    "label": "ERPNext Company",
-   "read_only_depends_on": "eval:doc.is_master_data_processed == 1"
+   "read_only_depends_on": "eval:doc.is_master_data_processed==1"
   },
   {
    "fieldname": "processed_files_section",
@@ -136,6 +143,7 @@
   },
   {
    "depends_on": "is_master_data_imported",
+   "description": "The accounts are set by the system automatically but do confirm these defaults",
    "fieldname": "accounts_section",
    "fieldtype": "Section Break",
    "label": "Accounts"
@@ -147,12 +155,6 @@
    "options": "Warehouse"
   },
   {
-   "fieldname": "round_off_account",
-   "fieldtype": "Link",
-   "label": "Round Off Account",
-   "options": "Account"
-  },
-  {
    "fieldname": "column_break_21",
    "fieldtype": "Column Break"
   },
@@ -212,11 +214,47 @@
    "fieldname": "default_uom",
    "fieldtype": "Link",
    "label": "Default UOM",
-   "options": "UOM"
+   "options": "UOM",
+   "read_only_depends_on": "eval:doc.is_master_data_imported==1"
+  },
+  {
+   "default": "[]",
+   "fieldname": "failed_import_log",
+   "fieldtype": "Code",
+   "hidden": 1,
+   "options": "JSON"
+  },
+  {
+   "fieldname": "failed_import_preview",
+   "fieldtype": "HTML",
+   "label": "Failed Import Log"
+  },
+  {
+   "fieldname": "import_log_section",
+   "fieldtype": "Section Break",
+   "label": "Import Log"
+  },
+  {
+   "fieldname": "default_round_off_account",
+   "fieldtype": "Link",
+   "label": "Default Round Off Account",
+   "options": "Account"
+  },
+  {
+   "default": "[]",
+   "fieldname": "fixed_errors_log",
+   "fieldtype": "Code",
+   "hidden": 1,
+   "options": "JSON"
+  },
+  {
+   "fieldname": "fixed_error_log_preview",
+   "fieldtype": "HTML",
+   "label": "Fixed Error Log"
   }
  ],
  "links": [],
- "modified": "2020-04-16 13:03:28.894919",
+ "modified": "2020-04-28 00:29:18.039826",
  "modified_by": "Administrator",
  "module": "ERPNext Integrations",
  "name": "Tally Migration",
diff --git a/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.py b/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.py
index 13474e1..462685f 100644
--- a/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.py
+++ b/erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.py
@@ -6,6 +6,7 @@
 
 import json
 import re
+import sys
 import traceback
 import zipfile
 from decimal import Decimal
@@ -15,18 +16,34 @@
 import frappe
 from erpnext import encode_company_abbr
 from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts
+from erpnext.accounts.doctype.chart_of_accounts_importer.chart_of_accounts_importer import unset_existing_data
+
 from frappe import _
 from frappe.custom.doctype.custom_field.custom_field import create_custom_field
 from frappe.model.document import Document
 from frappe.model.naming import getseries, revert_series_if_last
 from frappe.utils.data import format_datetime
 
-
 PRIMARY_ACCOUNT = "Primary"
 VOUCHER_CHUNK_SIZE = 500
 
 
+@frappe.whitelist()
+def new_doc(document):
+	document = json.loads(document)
+	doctype = document.pop("doctype")
+	document.pop("name", None)
+	doc = frappe.new_doc(doctype)
+	doc.update(document)
+
+	return doc
+
 class TallyMigration(Document):
+	def validate(self):
+		failed_import_log = json.loads(self.failed_import_log)
+		sorted_failed_import_log = sorted(failed_import_log, key=lambda row: row["doc"]["creation"])
+		self.failed_import_log = json.dumps(sorted_failed_import_log)
+
 	def autoname(self):
 		if not self.name:
 			self.name = "Tally Migration on " + format_datetime(self.creation)
@@ -65,9 +82,17 @@
 				"attached_to_name": self.name,
 				"content": json.dumps(value),
 				"is_private": True
-			}).insert()
+			})
+			try:
+				f.insert()
+			except frappe.DuplicateEntryError:
+				pass
 			setattr(self, key, f.file_url)
 
+	def set_account_defaults(self):
+		self.default_cost_center, self.default_round_off_account = frappe.db.get_value("Company", self.erpnext_company, ["cost_center", "round_off_account"])
+		self.default_warehouse = frappe.db.get_value("Stock Settings", "Stock Settings", "default_warehouse")
+
 	def _process_master_data(self):
 		def get_company_name(collection):
 			return collection.find_all("REMOTECMPINFO.LIST")[0].REMOTECMPNAME.string.strip()
@@ -84,7 +109,11 @@
 			children, parents = get_children_and_parent_dict(accounts)
 			group_set =  [acc[1] for acc in accounts if acc[2]]
 			children, customers, suppliers = remove_parties(parents, children, group_set)
-			coa = traverse({}, children, roots, roots, group_set)
+
+			try:
+				coa = traverse({}, children, roots, roots, group_set)
+			except RecursionError:
+				self.log(_("Error occurred while parsing Chart of Accounts: Please make sure that no two accounts have the same name"))
 
 			for account in coa:
 				coa[account]["root_type"] = root_type_map[account]
@@ -126,14 +155,18 @@
 		def remove_parties(parents, children, group_set):
 			customers, suppliers = set(), set()
 			for account in parents:
+				found = False
 				if self.tally_creditors_account in parents[account]:
-					children.pop(account, None)
+					found = True
 					if account not in group_set:
 						suppliers.add(account)
-				elif self.tally_debtors_account in parents[account]:
-					children.pop(account, None)
+				if self.tally_debtors_account in parents[account]:
+					found = True
 					if account not in group_set:
 						customers.add(account)
+				if found:
+					children.pop(account, None)
+
 			return children, customers, suppliers
 
 		def traverse(tree, children, accounts, roots, group_set):
@@ -151,6 +184,7 @@
 			parties, addresses = [], []
 			for account in collection.find_all("LEDGER"):
 				party_type = None
+				links = []
 				if account.NAME.string.strip() in customers:
 					party_type = "Customer"
 					parties.append({
@@ -161,7 +195,9 @@
 						"territory": "All Territories",
 						"customer_type": "Individual",
 					})
-				elif account.NAME.string.strip() in suppliers:
+					links.append({"link_doctype": party_type, "link_name": account["NAME"]})
+
+				if account.NAME.string.strip() in suppliers:
 					party_type = "Supplier"
 					parties.append({
 						"doctype": party_type,
@@ -170,6 +206,8 @@
 						"supplier_group": "All Supplier Groups",
 						"supplier_type": "Individual",
 					})
+					links.append({"link_doctype": party_type, "link_name": account["NAME"]})
+
 				if party_type:
 					address = "\n".join([a.string.strip() for a in account.find_all("ADDRESS")])
 					addresses.append({
@@ -183,7 +221,7 @@
 						"mobile": account.LEDGERPHONE.string.strip() if account.LEDGERPHONE else None,
 						"phone": account.LEDGERPHONE.string.strip() if account.LEDGERPHONE else None,
 						"gstin": account.PARTYGSTIN.string.strip() if account.PARTYGSTIN else None,
-						"links": [{"link_doctype": party_type, "link_name": account["NAME"]}],
+						"links": links
 					})
 			return parties, addresses
 
@@ -242,12 +280,18 @@
 		def create_company_and_coa(coa_file_url):
 			coa_file = frappe.get_doc("File", {"file_url": coa_file_url})
 			frappe.local.flags.ignore_chart_of_accounts = True
-			company = frappe.get_doc({
-				"doctype": "Company",
-				"company_name": self.erpnext_company,
-				"default_currency": "INR",
-				"enable_perpetual_inventory": 0,
-			}).insert()
+
+			try:
+				company = frappe.get_doc({
+					"doctype": "Company",
+					"company_name": self.erpnext_company,
+					"default_currency": "INR",
+					"enable_perpetual_inventory": 0,
+				}).insert()
+			except frappe.DuplicateEntryError:
+				company = frappe.get_doc("Company", self.erpnext_company)
+				unset_existing_data(self.erpnext_company)
+
 			frappe.local.flags.ignore_chart_of_accounts = False
 			create_charts(company.name, custom_chart=json.loads(coa_file.get_content()))
 			company.create_default_warehouses()
@@ -256,36 +300,35 @@
 			parties_file = frappe.get_doc("File", {"file_url": parties_file_url})
 			for party in json.loads(parties_file.get_content()):
 				try:
-					frappe.get_doc(party).insert()
+					party_doc = frappe.get_doc(party)
+					party_doc.insert()
 				except:
-					self.log(party)
+					self.log(party_doc)
 			addresses_file = frappe.get_doc("File", {"file_url": addresses_file_url})
 			for address in json.loads(addresses_file.get_content()):
 				try:
-					frappe.get_doc(address).insert(ignore_mandatory=True)
+					address_doc = frappe.get_doc(address)
+					address_doc.insert(ignore_mandatory=True)
 				except:
-					try:
-						gstin = address.pop("gstin", None)
-						frappe.get_doc(address).insert(ignore_mandatory=True)
-						self.log({"address": address, "message": "Invalid GSTIN: {}. Address was created without GSTIN".format(gstin)})
-					except:
-						self.log(address)
+					self.log(address_doc)
 
 		def create_items_uoms(items_file_url, uoms_file_url):
 			uoms_file = frappe.get_doc("File", {"file_url": uoms_file_url})
 			for uom in json.loads(uoms_file.get_content()):
 				if not frappe.db.exists(uom):
 					try:
-						frappe.get_doc(uom).insert()
+						uom_doc = frappe.get_doc(uom)
+						uom_doc.insert()
 					except:
-						self.log(uom)
+						self.log(uom_doc)
 
 			items_file = frappe.get_doc("File", {"file_url": items_file_url})
 			for item in json.loads(items_file.get_content()):
 				try:
-					frappe.get_doc(item).insert()
+					item_doc = frappe.get_doc(item)
+					item_doc.insert()
 				except:
-					self.log(item)
+					self.log(item_doc)
 
 		try:
 			self.publish("Import Master Data", _("Creating Company and Importing Chart of Accounts"), 1, 4)
@@ -299,10 +342,13 @@
 
 			self.publish("Import Master Data", _("Done"), 4, 4)
 
+			self.set_account_defaults()
 			self.is_master_data_imported = 1
+			frappe.db.commit()
 
 		except:
 			self.publish("Import Master Data", _("Process Failed"), -1, 5)
+			frappe.db.rollback()
 			self.log()
 
 		finally:
@@ -323,7 +369,9 @@
 					processed_voucher = function(voucher)
 					if processed_voucher:
 						vouchers.append(processed_voucher)
+					frappe.db.commit()
 				except:
+					frappe.db.rollback()
 					self.log(voucher)
 			return vouchers
 
@@ -349,6 +397,7 @@
 			journal_entry = {
 				"doctype": "Journal Entry",
 				"tally_guid": voucher.GUID.string.strip(),
+				"tally_voucher_no": voucher.VOUCHERNUMBER.string.strip() if voucher.VOUCHERNUMBER else "",
 				"posting_date": voucher.DATE.string.strip(),
 				"company": self.erpnext_company,
 				"accounts": accounts,
@@ -377,6 +426,7 @@
 				"doctype": doctype,
 				party_field: voucher.PARTYNAME.string.strip(),
 				"tally_guid": voucher.GUID.string.strip(),
+				"tally_voucher_no": voucher.VOUCHERNUMBER.string.strip() if voucher.VOUCHERNUMBER else "",
 				"posting_date": voucher.DATE.string.strip(),
 				"due_date": voucher.DATE.string.strip(),
 				"items": get_voucher_items(voucher, doctype),
@@ -468,14 +518,21 @@
 				oldest_year = new_year
 
 		def create_custom_fields(doctypes):
-			for doctype in doctypes:
-				df = {
-					"fieldtype": "Data",
-					"fieldname": "tally_guid",
-					"read_only": 1,
-					"label": "Tally GUID"
-				}
-				create_custom_field(doctype, df)
+			tally_guid_df = {
+				"fieldtype": "Data",
+				"fieldname": "tally_guid",
+				"read_only": 1,
+				"label": "Tally GUID"
+			}
+			tally_voucher_no_df = {
+				"fieldtype": "Data",
+				"fieldname": "tally_voucher_no",
+				"read_only": 1,
+				"label": "Tally Voucher Number"
+			}
+			for df in [tally_guid_df, tally_voucher_no_df]:
+				for doctype in doctypes:
+					create_custom_field(doctype, df)
 
 		def create_price_list():
 			frappe.get_doc({
@@ -490,7 +547,7 @@
 		try:
 			frappe.db.set_value("Account", encode_company_abbr(self.tally_creditors_account, self.erpnext_company), "account_type", "Payable")
 			frappe.db.set_value("Account", encode_company_abbr(self.tally_debtors_account, self.erpnext_company), "account_type", "Receivable")
-			frappe.db.set_value("Company", self.erpnext_company, "round_off_account", self.round_off_account)
+			frappe.db.set_value("Company", self.erpnext_company, "round_off_account", self.default_round_off_account)
 
 			vouchers_file = frappe.get_doc("File", {"file_url": self.vouchers})
 			vouchers = json.loads(vouchers_file.get_content())
@@ -521,11 +578,14 @@
 
 		for index, voucher in enumerate(chunk, start=start):
 			try:
-				doc = frappe.get_doc(voucher).insert()
-				doc.submit()
+				voucher_doc = frappe.get_doc(voucher)
+				voucher_doc.insert()
+				voucher_doc.submit()
 				self.publish("Importing Vouchers", _("{} of {}").format(index, total), index, total)
+				frappe.db.commit()
 			except:
-				self.log(voucher)
+				frappe.db.rollback()
+				self.log(voucher_doc)
 
 		if is_last:
 			self.status = ""
@@ -551,9 +611,22 @@
 		frappe.enqueue_doc(self.doctype, self.name, "_import_day_book_data", queue="long", timeout=3600)
 
 	def log(self, data=None):
-		data = data or self.status
-		message = "\n".join(["Data:", json.dumps(data, default=str, indent=4), "--" * 50, "\nException:", traceback.format_exc()])
-		return frappe.log_error(title="Tally Migration Error", message=message)
+		if isinstance(data, frappe.model.document.Document):
+			if sys.exc_info()[1].__class__ != frappe.DuplicateEntryError:
+				failed_import_log = json.loads(self.failed_import_log)
+				doc = data.as_dict()
+				failed_import_log.append({
+					"doc": doc,
+					"exc": traceback.format_exc()
+				})
+				self.failed_import_log = json.dumps(failed_import_log, separators=(',', ':'))
+				self.save()
+				frappe.db.commit()
+
+		else:
+			data = data or self.status
+			message = "\n".join(["Data:", json.dumps(data, default=str, indent=4), "--" * 50, "\nException:", traceback.format_exc()])
+			return frappe.log_error(title="Tally Migration Error", message=message)
 
 	def set_status(self, status=""):
 		self.status = status