Aditya Hase | 953229f | 2019-01-08 23:06:23 +0530 | [diff] [blame] | 1 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors |
| 2 | # License: GNU General Public License v3. See license.txt |
| 3 | |
| 4 | from __future__ import unicode_literals |
| 5 | from itertools import groupby |
| 6 | from operator import itemgetter |
| 7 | import frappe |
Aditya Hase | c888e52 | 2019-01-09 12:10:37 +0530 | [diff] [blame] | 8 | from frappe.utils import add_to_date, date_diff, getdate, nowdate |
Aditya Hase | 953229f | 2019-01-08 23:06:23 +0530 | [diff] [blame] | 9 | from erpnext.accounts.report.general_ledger.general_ledger import execute |
| 10 | |
| 11 | |
def get(filters=None):
	"""Return chart data (labels + one balance dataset) for an account.

	Expects *filters* to provide: timespan, timegrain, account and company.
	The heavy lifting is delegated to the General Ledger report; its rows
	are then reduced to one closing balance per posting date, padded so
	every day in the range has a value, and finally sampled down to the
	requested timegrain.
	"""
	timespan = filters.get("timespan")
	timegrain = filters.get("timegrain")
	account = filters.get("account")
	company = filters.get("company")

	from_date = get_from_date_from_timespan(timespan)
	to_date = nowdate()
	filters = frappe._dict({
		"company": company,
		"from_date": from_date,
		"to_date": to_date,
		"account": account,
		"group_by": "Group by Voucher (Consolidated)"
	})
	report_columns, report_results = execute(filters=filters)

	interesting_fields = ["posting_date", "balance"]

	# Skip the opening row (index 0) and the closing/total rows (last two);
	# the opening balance is re-added explicitly below.
	_results = []
	for row in report_results[1:-2]:
		_results.append([row[key] for key in interesting_fields])

	_results = add_opening_balance(from_date, _results, report_results[0])

	# Several vouchers may share a posting date; keep only the last
	# (i.e. closing) balance for each date.
	grouped_results = groupby(_results, key=itemgetter(0))

	results = [list(values)[-1] for key, values in grouped_results]

	results = add_missing_dates(results, from_date, to_date)

	results = granulate_results(results, from_date, to_date, timegrain)

	return {
		"labels": [result[0] for result in results],
		"datasets": [{
			"name": account,
			"values": [result[1] for result in results]
		}]
	}
Aditya Hase | 8896374 | 2019-01-09 11:18:17 +0530 | [diff] [blame] | 55 | |
def get_from_date_from_timespan(timespan):
	"""Translate a timespan label into its starting date, relative to today.

	Recognized labels: "Last Week", "Last Month", "Last Quarter",
	"Last Year". Any other label leaves all offsets at zero, i.e. the
	returned date is today.
	"""
	days = months = years = 0
	if "Last Week" == timespan:
		days = -7
	elif "Last Month" == timespan:
		months = -1
	elif "Last Quarter" == timespan:
		months = -3
	elif "Last Year" == timespan:
		years = -1
	# NOTE(review): both as_string and as_datetime are passed; confirm which
	# one frappe.utils.add_to_date honours and that callers expect a string.
	return add_to_date(None, years=years, months=months, days=days,
		as_string=True, as_datetime=True)
Aditya Hase | c888e52 | 2019-01-09 12:10:37 +0530 | [diff] [blame] | 68 | |
Aditya Hase | 5911e10 | 2019-01-09 13:09:31 +0530 | [diff] [blame] | 69 | |
def add_opening_balance(from_date, _results, opening):
	"""Prepend the opening-balance row if no row exists for *from_date*."""
	starts_on_from_date = bool(_results) and _results[0][0] == getdate(from_date)
	if not starts_on_from_date:
		_results.insert(0, [from_date, opening.balance])
	return _results
| 74 | |
def add_missing_dates(incomplete_results, from_date, to_date):
	"""Expand results to one [date, balance] row per day in the range.

	Days without an entry carry the last known balance forward. Assumes
	*incomplete_results* is non-empty and starts at *from_date* (guaranteed
	by add_opening_balance).
	"""
	day_count = date_diff(to_date, from_date)

	results_dict = dict(incomplete_results)
	last_balance = incomplete_results[0][1]
	results = []
	for date in (add_to_date(getdate(from_date), days=n) for n in range(day_count + 1)):
		if date in results_dict:
			last_balance = results_dict[date]
		results.append([date, last_balance])
	return results
Aditya Hase | 6cadf77 | 2019-01-09 12:50:50 +0530 | [diff] [blame] | 88 | |
def get_dates_from_timegrain(from_date, to_date, timegrain):
	"""Return sampling dates for *timegrain* from *from_date* up to (and one
	step past) *to_date*.

	Recognized grains: "Daily", "Weekly", "Monthly", "Quarterly"; anything
	else falls back to a daily step.
	"""
	days = months = years = 0
	if "Daily" == timegrain:
		days = 1
	elif "Weekly" == timegrain:
		days = 7
	elif "Monthly" == timegrain:
		months = 1
	elif "Quarterly" == timegrain:
		months = 3
	else:
		# Unknown grain: default to daily. Previously all offsets stayed 0,
		# so add_to_date never advanced and the loop below hung forever.
		days = 1

	dates = [from_date]
	while dates[-1] <= to_date:
		dates.append(add_to_date(dates[-1], years=years, months=months, days=days))
	return dates
| 104 | |
def granulate_results(incomplete_results, from_date, to_date, timegrain):
	"""Keep only the rows whose date falls on a timegrain boundary."""
	sample_dates = set(get_dates_from_timegrain(getdate(from_date), getdate(to_date), timegrain))
	return [row for row in incomplete_results if row[0] in sample_dates]