Merge branch 'master' of https://github.com/webnotes/erpnext
Conflicts:
setup/doctype/backup_manager/backup_dropbox.py
setup/doctype/backup_manager/backup_manager.py
diff --git a/controllers/accounts_controller.py b/controllers/accounts_controller.py
index 73d7608..576921a 100644
--- a/controllers/accounts_controller.py
+++ b/controllers/accounts_controller.py
@@ -23,6 +23,9 @@
class AccountsController(TransactionBase):
def get_gl_dict(self, args, cancel=None):
"""this method populates the common properties of a gl entry record"""
+ if cancel is None:
+ cancel = (self.doc.docstatus == 2)
+
gl_dict = {
'company': self.doc.company,
'posting_date': self.doc.posting_date,
@@ -30,7 +33,7 @@
'voucher_no': self.doc.name,
'aging_date': self.doc.fields.get("aging_date") or self.doc.posting_date,
'remarks': self.doc.remarks,
- 'is_cancelled': self.doc.docstatus == 2 and "Yes" or "No",
+ 'is_cancelled': cancel and "Yes" or "No",
'fiscal_year': self.doc.fiscal_year,
'debit': 0,
'credit': 0,
diff --git a/patches/march_2013/p05_payment_reconciliation.py b/patches/march_2013/p05_payment_reconciliation.py
new file mode 100644
index 0000000..7b6306b
--- /dev/null
+++ b/patches/march_2013/p05_payment_reconciliation.py
@@ -0,0 +1,29 @@
+import webnotes
+
+def execute():
+ # delete wrong gle entries created due to a bug in make_gl_entries of Account Controller
+ # when using payment reconciliation
+ res = webnotes.conn.sql_list("""select distinct gl1.voucher_no
+ from `tabGL Entry` gl1, `tabGL Entry` gl2
+ where
+ date(gl1.modified) >= "2013-03-11"
+ and date(gl1.modified) = date(gl2.modified)
+ and gl1.voucher_no = gl2.voucher_no
+ and gl1.voucher_type = "Journal Voucher"
+ and gl1.voucher_type = gl2.voucher_type
+ and gl1.posting_date = gl2.posting_date
+ and gl1.account = gl2.account
+ and ifnull(gl1.is_cancelled, 'No') = 'No' and ifnull(gl2.is_cancelled, 'No') = 'No'
+ and ifnull(gl1.against_voucher, '') = ifnull(gl2.against_voucher, '')
+ and ifnull(gl1.against_voucher_type, '') = ifnull(gl2.against_voucher_type, '')
+ and gl1.remarks = gl2.remarks
+ and ifnull(gl1.debit, 0) = ifnull(gl2.credit, 0)
+ and ifnull(gl1.credit, 0) = ifnull(gl2.debit, 0)
+ and gl1.name > gl2.name""")
+
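+	# for each affected voucher: mark the wrongly created entries as cancelled,
+	# then let the Journal Voucher regenerate its GL entries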
+ for r in res:
+ webnotes.conn.sql("""update `tabGL Entry` set `is_cancelled`='Yes'
+ where voucher_type='Journal Voucher' and voucher_no=%s""", r)
+ jv = webnotes.bean("Journal Voucher", r)
+ jv.run_method("make_gl_entries")
+
\ No newline at end of file
diff --git a/patches/patch_list.py b/patches/patch_list.py
index bc68ea1..f5e3f40 100644
--- a/patches/patch_list.py
+++ b/patches/patch_list.py
@@ -211,4 +211,5 @@
"patches.march_2013.p02_get_global_default",
"patches.march_2013.p03_rename_blog_to_blog_post",
"execute:webnotes.reload_doc('hr', 'search_criteria', 'monthly_attendance_details')",
+ "patches.march_2013.p05_payment_reconciliation",
]
\ No newline at end of file
diff --git a/selling/doctype/lead/lead.py b/selling/doctype/lead/lead.py
index c336fe2..571cdfd 100644
--- a/selling/doctype/lead/lead.py
+++ b/selling/doctype/lead/lead.py
@@ -97,7 +97,6 @@
return webnotes.conn.get_value('Sales Email Settings',None,'email_id')
def on_trash(self):
- webnotes.conn.sql("""delete from tabCommunication where lead=%s""",
- self.doc.name)
+ webnotes.conn.sql("""update tabCommunication set lead=null where lead=%s""", self.doc.name)
webnotes.conn.sql("""update `tabSupport Ticket` set lead='' where lead=%s""",
self.doc.name)
diff --git a/setup/doctype/backup_manager/backup_dropbox.py b/setup/doctype/backup_manager/backup_dropbox.py
index e8eed3b..b668803 100644
--- a/setup/doctype/backup_manager/backup_dropbox.py
+++ b/setup/doctype/backup_manager/backup_dropbox.py
@@ -1,3 +1,13 @@
+# SETUP:
+# install the Dropbox SDK: pip install --upgrade dropbox
+#
+# create a new Dropbox app
+#
+# in conf.py, set the Dropbox app keys
+# dropbox_access_key
+# dropbox_secret_key
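+#
+# example conf.py entries (placeholder values, use your own app's keys):
+# dropbox_access_key = "app_key_from_dropbox_console"
+# dropbox_secret_key = "app_secret_from_dropbox_console"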
+
+
import os
import webnotes
from webnotes.utils import get_request_site_address, get_base_path
@@ -11,7 +21,7 @@
+ "?cmd=setup.doctype.backup_manager.backup_dropbox.dropbox_callback"
url = sess.build_authorize_url(request_token, return_address)
-
+
return {
"url": url,
"key": request_token.key,
@@ -20,43 +30,43 @@
@webnotes.whitelist(allow_guest=True)
def dropbox_callback(oauth_token=None, not_approved=False):
+ from dropbox import client
if not not_approved:
if webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key")==oauth_token:
- webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 1)
+ allowed = 1
message = "Dropbox access allowed."
sess = get_dropbox_session()
sess.set_request_token(webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
webnotes.conn.get_value("Backup Manager", None, "dropbox_access_secret"))
access_token = sess.obtain_access_token()
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_key", access_token.key)
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_secret", access_token.secret)
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", allowed)
+ dropbox_client = client.DropboxClient(sess)
+ dropbox_client.file_create_folder("files")
- webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_key",
- access_token.key)
- webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_secret",
- access_token.secret)
-
else:
- webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 0)
+ allowed = 0
message = "Illegal Access Token Please try again."
else:
- webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 0)
+ allowed = 0
message = "Dropbox Access not approved."
-
+
webnotes.message_title = "Dropbox Approval"
webnotes.message = "<h3>%s</h3><p>Please close this window.</p>" % message
-
+
webnotes.conn.commit()
webnotes.response['type'] = 'page'
webnotes.response['page_name'] = 'message.html'
def backup_to_dropbox():
- from dropbox import client, session
+ from dropbox import client, session, rest
from conf import dropbox_access_key, dropbox_secret_key
from webnotes.utils.backups import new_backup
if not webnotes.conn:
webnotes.connect()
-
sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
sess.set_token(webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
@@ -66,25 +76,25 @@
# upload database
backup = new_backup()
- filename = backup.backup_path_db
+ filename = os.path.join(get_base_path(), "public", "backups",
+ os.path.basename(backup.backup_path_db))
upload_file_to_dropbox(filename, "database", dropbox_client)
- path1 = os.path.join(get_base_path(), "public", "backups")
- response = dropbox_client.metadata('/database')
-
- #add missing files
- found = False
- for filename in os.listdir(path1):
+
+ response = dropbox_client.metadata("/files")
+
+ # upload files to files folder
+	path1 = os.path.join(get_base_path(), "public", "files")
+	for filename in os.listdir(path1):
found = False
pth=path1+'/'+filename
size=os.stat(pth).st_size
for file_metadata in response["contents"]:
if filename==os.path.basename(file_metadata["path"]):
- if size==file_metadata["bytes"]:
+				if os.stat(pth).st_size==file_metadata["bytes"]:
found=True
+
if not found:
- upload_file_to_dropbox(pth, "database", dropbox_client)
- if found:
- webnotes.msgprint("no backup required everything is upto date")
+			upload_file_to_dropbox(pth, "files", dropbox_client)
def get_dropbox_session():
from dropbox import session
@@ -93,22 +103,22 @@
except ImportError, e:
webnotes.msgprint(_("Please set Dropbox access keys in") + " conf.py",
raise_exception=True)
-
sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
return sess
def upload_file_to_dropbox(filename, folder, dropbox_client):
- size = os.stat(filename).st_size
- f = open(filename,'r')
- if size > 4194304:
- uploader = dropbox_client.get_chunked_uploader(f, size)
- while uploader.offset < size:
- try:
- uploader.upload_chunked()
- except rest.ErrorResponse, e:
- pass
- else:
- response = dropbox_client.put_file(folder + "/" + os.path.basename(filename), f, overwrite=True)
+	size = os.stat(filename).st_size
+	f = open(filename, 'rb')
+	if size > 4194304:
+		uploader = dropbox_client.get_chunked_uploader(f, size)
+		while uploader.offset < size:
+			try:
+				uploader.upload_chunked()
+			except rest.ErrorResponse, e:
+				pass
+		# commit the chunked upload to its final path after all chunks are sent
+		uploader.finish(folder + "/" + os.path.basename(filename), overwrite=True)
+	else:
+		response = dropbox_client.put_file(folder + "/" + os.path.basename(filename), f, overwrite=True)
if __name__=="__main__":
- backup_to_dropbox()
\ No newline at end of file
+ backup_to_dropbox()
diff --git a/setup/doctype/backup_manager/backup_googledrive.py b/setup/doctype/backup_manager/backup_googledrive.py
new file mode 100644
index 0000000..c794672
--- /dev/null
+++ b/setup/doctype/backup_manager/backup_googledrive.py
@@ -0,0 +1,161 @@
+# SETUP:
+# install the Google API client: pip install --upgrade google-api-python-client
+#
+# In the Google API console
+# - create a new API project
+# - create a new OAuth2 client of the "installed application" type
+#   (Google does not support subdomain redirect URIs)
+#
+# in conf.py, set the OAuth2 client settings
+# gdrive_client_id
+# gdrive_client_secret
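+#
+# example conf.py entries (placeholder values, use your own client credentials):
+# gdrive_client_id = "1234567890.apps.googleusercontent.com"
+# gdrive_client_secret = "client_secret_from_google_api_console"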
+
+import httplib2
+import sys
+import os
+import mimetypes
+import webnotes
+import oauth2client.client
+from webnotes.utils import get_request_site_address, get_base_path
+from webnotes import _, msgprint
+from apiclient.discovery import build
+from apiclient.http import MediaFileUpload
+
+@webnotes.whitelist()
+def get_gdrive_authorize_url():
+ flow = get_gdrive_flow()
+ authorize_url = flow.step1_get_authorize_url()
+ return {
+ "authorize_url": authorize_url,
+ }
+
+@webnotes.whitelist()
+def upload_files(name, mimetype, service, folder_id):
+ if not webnotes.conn:
+ webnotes.connect()
+ file_name = os.path.basename(name)
+ media_body = MediaFileUpload(name, mimetype=mimetype, resumable=True)
+ body = {
+ 'title': file_name,
+ 'description': 'Backup File',
+		'mimeType': mimetype,
+		'parents': [{
+			'kind': 'drive#fileLink',
+ 'id': folder_id
+ }]
+ }
+ request = service.files().insert(body=body, media_body=media_body)
+ response = None
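+	# resumable upload loop: next_chunk() sends one chunk at a time and
+	# returns a response only after the final chunk has been accepted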
+ while response is None:
+ status, response = request.next_chunk()
+
+def backup_to_gdrive():
+ from webnotes.utils.backups import new_backup
+ found_database = False
+ found_files = False
+ if not webnotes.conn:
+ webnotes.connect()
+ flow = get_gdrive_flow()
+ credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials")
+ credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
+ http = httplib2.Http()
+ http = credentials.authorize(http)
+ drive_service = build('drive', 'v2', http=http)
+
+ # upload database
+ backup = new_backup()
+ path = os.path.join(get_base_path(), "public", "backups")
+ filename = os.path.join(path, os.path.basename(backup.backup_path_db))
+
+ # upload files to database folder
+ upload_files(filename, 'application/x-gzip', drive_service,
+ webnotes.conn.get_value("Backup Manager", None, "database_folder_id"))
+
+ # upload files to files folder
+ path = os.path.join(get_base_path(), "public", "files")
+	for files in os.listdir(path):
+		# reset the flag for every file, otherwise one match skips all later uploads
+		found_files = False
+		filename = path + "/" + files
+ ext = filename.split('.')[-1]
+ size = os.path.getsize(filename)
+ if ext == 'gz' or ext == 'gzip':
+ mimetype = 'application/x-gzip'
+ else:
+ mimetype = mimetypes.types_map["." + ext]
+ #Compare Local File with Server File
+ param = {}
+ children = drive_service.children().list(
+ folderId=webnotes.conn.get_value("Backup Manager", None, "files_folder_id"),
+ **param).execute()
+ for child in children.get('items', []):
+ file = drive_service.files().get(fileId=child['id']).execute()
+ if files == file['title'] and size == int(file['fileSize']):
+ found_files = True
+ break
+ if not found_files:
+ upload_files(filename, mimetype, drive_service, webnotes.conn.get_value("Backup Manager", None, "files_folder_id"))
+
+def get_gdrive_flow():
+ from oauth2client.client import OAuth2WebServerFlow
+ import conf
+
+ if not hasattr(conf, "gdrive_client_id"):
+ webnotes.msgprint(_("Please set Google Drive access keys in") + " conf.py",
+ raise_exception=True)
+
+ #callback_url = get_request_site_address(True) \
+ # + "?cmd=setup.doctype.backup_manager.backup_googledrive.googledrive_callback"
+
+ # for installed apps since google does not support subdomains
+ redirect_uri = "urn:ietf:wg:oauth:2.0:oob"
+
+ flow = OAuth2WebServerFlow(conf.gdrive_client_id, conf.gdrive_client_secret,
+ "https://www.googleapis.com/auth/drive", redirect_uri)
+ return flow
+
+@webnotes.whitelist()
+def gdrive_callback(verification_code = None):
+ flow = get_gdrive_flow()
+ if verification_code:
+ credentials = flow.step2_exchange(verification_code)
+ allowed = 1
+
+ # make folders to save id
+ http = httplib2.Http()
+ http = credentials.authorize(http)
+ drive_service = build('drive', 'v2', http=http)
+ erpnext_folder_id = create_erpnext_folder(drive_service)
+ database_folder_id = create_folder('database', drive_service, erpnext_folder_id)
+ files_folder_id = create_folder('files', drive_service, erpnext_folder_id)
+
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_access_allowed", allowed)
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "database_folder_id", database_folder_id)
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "files_folder_id", files_folder_id)
+ final_credentials = credentials.to_json()
+ webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_credentials", final_credentials)
+
+ webnotes.msgprint("Updated")
+
+def create_erpnext_folder(service):
+ if not webnotes.conn:
+ webnotes.connect()
+ erpnext = {
+ 'title': 'erpnext',
+ 'mimeType': 'application/vnd.google-apps.folder'
+ }
+ erpnext = service.files().insert(body=erpnext).execute()
+ return erpnext['id']
+
+def create_folder(name, service, folder_id):
+ database = {
+ 'title': name,
+ 'mimeType': 'application/vnd.google-apps.folder',
+ 'parents': [{
+ 'kind': 'drive#fileLink',
+ 'id': folder_id
+ }]
+ }
+ database = service.files().insert(body=database).execute()
+ return database['id']
+
+if __name__=="__main__":
+ backup_to_gdrive()
\ No newline at end of file
diff --git a/setup/doctype/backup_manager/backup_manager.js b/setup/doctype/backup_manager/backup_manager.js
index 154c72e..28315c5 100644
--- a/setup/doctype/backup_manager/backup_manager.js
+++ b/setup/doctype/backup_manager/backup_manager.js
@@ -1,24 +1,65 @@
+cur_frm.cscript.refresh = function(doc) {
+ cur_frm.disable_save();
+}
+
+//dropbox
cur_frm.cscript.allow_dropbox_access = function(doc) {
- wn.call({
- method: "setup.doctype.backup_manager.backup_dropbox.get_dropbox_authorize_url",
- callback: function(r) {
- if(!r.exc) {
- cur_frm.set_value("dropbox_access_secret", r.message.secret);
- cur_frm.set_value("dropbox_access_key", r.message.key);
- cur_frm.save(null, function() {
- window.open(r.message.url);
- });
+ if (doc.send_notifications_to == '') {
+		msgprint("Please enter an email address.")
+ }
+ else {
+ wn.call({
+ method: "setup.doctype.backup_manager.backup_dropbox.get_dropbox_authorize_url",
+ callback: function(r) {
+ if(!r.exc) {
+ cur_frm.set_value("dropbox_access_secret", r.message.secret);
+ cur_frm.set_value("dropbox_access_key", r.message.key);
+ cur_frm.save(null, function() {
+ window.open(r.message.url);
+ });
+ }
}
- }
- })
+ })
+ }
}
cur_frm.cscript.backup_right_now = function(doc) {
msgprint("Backing up and uploading. This may take a few minutes.")
wn.call({
- method: "setup.doctype.backup_manager.backup_manager.take_backups",
+ method: "setup.doctype.backup_manager.backup_manager.take_backups_dropbox",
callback: function(r) {
msgprint("Backups taken. Please check your email for the response.")
}
})
-}
\ No newline at end of file
+}
+//gdrive
+cur_frm.cscript.allow_gdrive_access = function(doc) {
+ if (doc.send_notifications_to == '') {
+		msgprint("Please enter an email address.")
+ }
+ else {
+ wn.call({
+ method: "setup.doctype.backup_manager.backup_googledrive.get_gdrive_authorize_url",
+ callback: function(r) {
+ window.open(r.message.authorize_url);
+ }
+ })
+ }
+}
+
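+// send the verification code pasted by the user to the server to complete the OAuth2 exchange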
+cur_frm.cscript.validate_gdrive = function(doc) {
+ wn.call({
+ method: "setup.doctype.backup_manager.backup_manager.gdrive_callback",
+ args: {
+ verification_code: doc.verification_code
+ },
+ });
+}
+
+cur_frm.cscript.upload_backups_to_dropbox = function(doc) {
+ cur_frm.save()
+}
+
+cur_frm.cscript.upload_backups_to_gdrive = function(doc) {
+ cur_frm.save()
+}
diff --git a/setup/doctype/backup_manager/backup_manager.py b/setup/doctype/backup_manager/backup_manager.py
index 2b9cfca..69de823 100644
--- a/setup/doctype/backup_manager/backup_manager.py
+++ b/setup/doctype/backup_manager/backup_manager.py
@@ -3,7 +3,8 @@
from __future__ import unicode_literals
import webnotes
from webnotes import _
-from webnotes.utils import getTraceback
+from backup_dropbox import dropbox_callback, get_dropbox_session, get_dropbox_authorize_url
+from backup_googledrive import gdrive_callback, get_gdrive_flow, get_gdrive_authorize_url
class DocType:
def __init__(self, d, dl):
@@ -17,10 +18,13 @@
def take_backups_if(freq):
if webnotes.conn.get_value("Backup Manager", None, "upload_backups_to_dropbox")==freq:
- take_backups()
-
+ take_backups_dropbox()
+
+ if webnotes.conn.get_value("Backup Manager", None, "upload_backups_to_gdrive")==freq:
+ take_backups_gdrive()
+
@webnotes.whitelist()
-def take_backups():
+def take_backups_dropbox():
try:
from setup.doctype.backup_manager.backup_dropbox import backup_to_dropbox
backup_to_dropbox()
@@ -29,6 +33,16 @@
webnotes.errprint(e)
send_email(False, "Dropbox", e)
+# backup to gdrive
+@webnotes.whitelist()
+def take_backups_gdrive():
+ try:
+ from setup.doctype.backup_manager.backup_googledrive import backup_to_gdrive
+ backup_to_gdrive()
+ send_email(True, "Google Drive")
+ except Exception, e:
+ send_email(False, "Google Drive", e)
+
def send_email(success, service_name, error_status=None):
if success:
subject = "Backup Upload Successful"
@@ -47,5 +61,5 @@
# email system managers
from webnotes.utils.email_lib import sendmail
- sendmail(webnotes.conn.get_value("Backup Manager", None, "send_notifications_to").split(","),
- subject=subject, msg=message)
+ sendmail(webnotes.conn.get_value("Backup Manager", None, "send_notifications_to").split(","),
+ subject=subject, msg=message)
diff --git a/setup/doctype/backup_manager/backup_manager.txt b/setup/doctype/backup_manager/backup_manager.txt
index a994e7d..9a43f34 100644
--- a/setup/doctype/backup_manager/backup_manager.txt
+++ b/setup/doctype/backup_manager/backup_manager.txt
@@ -1,8 +1,8 @@
[
{
- "creation": "2013-03-05 16:35:50",
+ "creation": "2013-03-15 11:06:59",
"docstatus": 0,
- "modified": "2013-03-07 12:18:07",
+ "modified": "2013-03-15 17:27:33",
"modified_by": "Administrator",
"owner": "Administrator"
},
@@ -56,7 +56,9 @@
"doctype": "DocField",
"fieldname": "backup_right_now",
"fieldtype": "Button",
- "label": "Backup Right Now"
+ "hidden": 1,
+ "label": "Backup Right Now",
+ "read_only": 1
},
{
"description": "Note: Backups and files are not deleted from Dropbox, you will have to delete them manually.",
@@ -103,6 +105,70 @@
"label": "Allow Dropbox Access"
},
{
+ "description": "Note: Backups and files are not deleted from Google Drive, you will have to delete them manually.",
+ "doctype": "DocField",
+ "fieldname": "sync_with_gdrive",
+ "fieldtype": "Section Break",
+ "label": "Sync with Google Drive"
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "upload_backups_to_gdrive",
+ "fieldtype": "Select",
+ "label": "Upload Backups to Google Drive",
+ "options": "Never\nDaily\nWeekly"
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "allow_gdrive_access",
+ "fieldtype": "Button",
+ "label": "Allow Google Drive Access"
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "verification_code",
+ "fieldtype": "Data",
+ "label": "Enter Verification Code"
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "validate_gdrive",
+ "fieldtype": "Button",
+ "label": "Validate"
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "gdrive_access_allowed",
+ "fieldtype": "Check",
+ "hidden": 1,
+ "label": "Google Drive Access Allowed",
+ "read_only": 1
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "gdrive_credentials",
+ "fieldtype": "Text",
+ "hidden": 1,
+ "label": "Credentials",
+ "read_only": 1
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "database_folder_id",
+ "fieldtype": "Data",
+ "hidden": 1,
+ "label": "Database Folder ID",
+ "read_only": 1
+ },
+ {
+ "doctype": "DocField",
+ "fieldname": "files_folder_id",
+ "fieldtype": "Data",
+ "hidden": 1,
+ "label": "Files Folder ID",
+ "read_only": 1
+ },
+ {
"doctype": "DocPerm"
}
]
\ No newline at end of file
diff --git a/stock/doctype/item/item.py b/stock/doctype/item/item.py
index 74bf3f3..931b776 100644
--- a/stock/doctype/item/item.py
+++ b/stock/doctype/item/item.py
@@ -238,8 +238,7 @@
from website.helpers.product import get_parent_item_groups, url_for_website
self.parent_groups = get_parent_item_groups(self.doc.item_group) + [{"name":self.doc.name}]
self.doc.website_image = url_for_website(self.doc.website_image)
- self.doc.title = self.doc.item_name == self.doc.name and self.doc.item_name or \
- (self.doc.item_name + " [" + self.doc.name + "]")
+ self.doc.title = self.doc.item_name
if self.doc.slideshow:
from website.helpers.slideshow import get_slideshow
diff --git a/website/doctype/style_settings/custom_template.css b/website/doctype/style_settings/custom_template.css
index f4038fb..712c748 100644
--- a/website/doctype/style_settings/custom_template.css
+++ b/website/doctype/style_settings/custom_template.css
@@ -47,10 +47,9 @@
{% if doc.page_border %}
/* Page Border*/
div.outer {
- -moz-box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
- -webkit-box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
- box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
- border-radius: 5px;
+	-moz-box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
+	-webkit-box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
+	box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
}
{% else %}
{% if doc.background_color == doc.page_background %}
@@ -61,6 +60,11 @@
{% endif %}
{% endif %}
+div.web-footer, div.web-footer a {
+ font-size: 90%;
+ color: #{{ get_hex_shade(doc.background_color or "ffffff", 70) }};
+}
+
/* Bootstrap Navbar */
.navbar-inverse .navbar-inner {
background-color: #{{ doc.top_bar_background or "444444"}};
diff --git a/website/doctype/style_settings/style_settings.txt b/website/doctype/style_settings/style_settings.txt
index 3f898e7..df266a5 100644
--- a/website/doctype/style_settings/style_settings.txt
+++ b/website/doctype/style_settings/style_settings.txt
@@ -2,7 +2,7 @@
{
"creation": "2013-03-08 11:36:53",
"docstatus": 0,
- "modified": "2013-03-12 13:35:14",
+ "modified": "2013-03-14 11:57:20",
"modified_by": "Administrator",
"owner": "Administrator"
},
@@ -150,14 +150,14 @@
"fieldtype": "Column Break"
},
{
- "description": "Add the name of Google Web Font e.g. \"Open Sans\"",
+ "description": "Add the name of <a href=\"http://google.com/webfonts\" target=\"_blank\">Google Web Font</a> e.g. \"Open Sans\"",
"doctype": "DocField",
"fieldname": "google_web_font_for_heading",
"fieldtype": "Data",
"label": "Google Web Font (Heading)"
},
{
- "description": "Add the name of Google Web Font e.g. \"Open Sans\"",
+ "description": "Add the name of <a href=\"http://google.com/webfonts\" target=\"_blank\">Google Web Font</a> e.g. \"Open Sans\"",
"doctype": "DocField",
"fieldname": "google_web_font_for_text",
"fieldtype": "Data",
diff --git a/website/doctype/website_settings/website_settings.js b/website/doctype/website_settings/website_settings.js
index a02c3b2..67e4941 100644
--- a/website/doctype/website_settings/website_settings.js
+++ b/website/doctype/website_settings/website_settings.js
@@ -50,5 +50,5 @@
var src = doc.banner_image;
if(src.indexOf("/")==-1) src = "files/" + src;
cur_frm.set_value("banner_html", "<a href='/'><img src='"+ src
- +"'></a>");
+ +"' style='max-width: 200px;'></a>");
}
\ No newline at end of file
diff --git a/website/helpers/blog_feed.py b/website/helpers/blog_feed.py
index c59a419..41c203e 100644
--- a/website/helpers/blog_feed.py
+++ b/website/helpers/blog_feed.py
@@ -44,7 +44,7 @@
<description>%(content)s</description>
<link>%(link)s</link>
<guid>%(name)s</guid>
- <pubDate>%(creation)s</pubDate>
+ <pubDate>%(published_on)s</pubDate>
</item>"""
def generate():
@@ -57,13 +57,12 @@
items = ''
blog_list = webnotes.conn.sql("""\
- select page_name as name, modified, creation, title from `tabBlog Post`
+ select page_name as name, published_on, modified, title, content from `tabBlog Post`
where ifnull(published,0)=1
- order by creation desc, modified desc, name asc limit 20""", as_dict=1)
+ order by published_on desc limit 20""", as_dict=1)
for blog in blog_list:
blog.link = host + '/' + blog.name + '.html'
- blog.content = get_blog_content(blog.name)
items += rss_item % blog
diff --git a/website/templates/css/login.css b/website/templates/css/login.css
index 710f889..c2a7af2 100644
--- a/website/templates/css/login.css
+++ b/website/templates/css/login.css
@@ -6,6 +6,7 @@
.layout-wrapper {
background-color: #fff;
+ color: #333;
padding: 10px;
box-shadow: 1px 1px 3px 3px #ccc;
font-size: 12px;
diff --git a/website/templates/css/product_page.css b/website/templates/css/product_page.css
index 5780ee4..566b6b5 100644
--- a/website/templates/css/product_page.css
+++ b/website/templates/css/product_page.css
@@ -1,6 +1,6 @@
<style>
.item-main-image {
- max-width: 400px;
+ max-width: 100%;
margin: auto;
}
.web-long-description {
diff --git a/website/templates/html/product_page.html b/website/templates/html/product_page.html
index 5bffe91..f897a31 100644
--- a/website/templates/html/product_page.html
+++ b/website/templates/html/product_page.html
@@ -11,46 +11,42 @@
{% block content %}
{% include 'html/product_search_box.html' %}
{% include 'html/product_breadcrumbs.html' %}
- <div class="span12">
- <h3 itemprop="name">{{ item_name }}</h3>
- <p class="help">Item Code: {{ name }}</p>
- </div>
<div class="span12 product-page-content" itemscope itemtype="http://schema.org/Product">
- {% if slideshow %}
- {% include "html/slideshow.html" %}
- {% else %}
- {% if website_image %}
- <image itemprop="image" class="item-main-image"
- src="{{ website_image }}" />
- {% else %}
- <div class="img-area">
- {% include 'html/product_missing_image.html' %}
- </div>
- {% endif %}
- {% endif %}
- <br><br>
<div class="row">
- <div class="span9">
- <h3>Product Description</h3>
+ <div class="span6">
+ {% if slideshow %}
+ {% include "html/slideshow.html" %}
+ {% else %}
+ {% if website_image %}
+ <image itemprop="image" class="item-main-image"
+ src="{{ website_image }}" />
+ {% else %}
+ <div class="img-area">
+ {% include 'html/product_missing_image.html' %}
+ </div>
+ {% endif %}
+ {% endif %}
+ </div>
+ <div class="span6">
+ <h3 itemprop="name" style="margin-top: 0px;">{{ item_name }}</h3>
+ <p class="help">Item Code: {{ name }}</p>
+ <h4>Product Description</h4>
<div itemprop="description">
{{ web_long_description or web_short_description or
"[No description given]" }}
</div>
- <hr>
{% if obj.doclist.get({"doctype":"Item Website Specification"}) %}
- <h3>Specifications</h3>
- <table class="table table-bordered" style="width: 100%">
- {% for d in obj.doclist.get(
- {"doctype":"Item Website Specification"}) %}
- <tr>
- <td style="width: 30%;">{{ d.label }}</td>
- <td>{{ d.description }}</td>
- </tr>
- {% endfor %}
- </table>
+ <h4>Specifications</h4>
+ <table class="table table-bordered" style="width: 100%">
+ {% for d in obj.doclist.get(
+ {"doctype":"Item Website Specification"}) %}
+ <tr>
+ <td style="width: 30%;">{{ d.label }}</td>
+ <td>{{ d.description }}</td>
+ </tr>
+ {% endfor %}
+ </table>
{% endif %}
- </div>
- <div class="span3">
<div class="item-price hide">
<p>Price:</p>
</div>
diff --git a/website/utils.py b/website/utils.py
index c7fe2f6..443bde0 100644
--- a/website/utils.py
+++ b/website/utils.py
@@ -309,13 +309,6 @@
return url
def get_hex_shade(color, percent):
- # switch dark and light shades
- if int(color, 16) > int("808080", 16):
- percent = -percent
-
- # stronger diff for darker shades
- if int(color, 16) < int("333333", 16):
- percent = percent * 2
def p(c):
v = int(c, 16) + int(int('ff', 16) * (float(percent)/100))
@@ -329,4 +322,16 @@
return h
r, g, b = color[0:2], color[2:4], color[4:6]
- return p(r) + p(g) + p(b)
\ No newline at end of file
+
+ avg = (float(int(r, 16) + int(g, 16) + int(b, 16)) / 3)
+ # switch dark and light shades
+ if avg > 128:
+ percent = -percent
+
+ # stronger diff for darker shades
+ if percent < 25 and avg < 64:
+ percent = percent * 2
+
+ return p(r) + p(g) + p(b)
+
+
\ No newline at end of file