import os
import argparse

from bs4 import BeautifulSoup as bs

import auxiliary

###### This script generates an HTML report of the super-resolution (SR) results ######
PSNR = [25.28, 25.35, 24.54, 24.75, 25.09, 25.28, 25.03, 24.31, 16.34, 21.21, 24.44]
SSIM = [0.655, 0.716, 0.720, 0.709, 0.714, 0.705, 0.717, 0.725, 0.711]
# [20.92, 24.62,23.18, 24.51,24.03,  24.04, 24.65, 22.82] # Add the PSNR values for the different losses here
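# NOTE: these lists are assumed to be ordered like the sorted loss directories
# discovered below; PSNR[index] is looked up per loss when the metrics row is
# built, so the list must have at least as many entries as there are losses.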

parser = argparse.ArgumentParser(description="HTML Report Generator")
parser.add_argument("--sr_results_dir", default='./', help="The path of the directory where the current results are located (assumed to be in .png format") # Mandatory argument
parser.add_argument("--gt_dir", default= './results/GT', help="The directory where the ground truth images are located") # Mandatory argument
parser.add_argument("--gt_extension",default = 'jpg', help="The extension of the ground truth images. Default is .jpg ")
parser.add_argument("--img_ext",default = 'jpg', help="The extension of the ground truth images. Default is .jpg ") # Optional argument

opt = parser.parse_args()
sr_results_dir = opt.sr_results_dir
gt_dir = opt.gt_dir
gt_extension = opt.gt_extension
img_ext = opt.img_ext
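# Example invocation (the script filename is a placeholder):
#   python generate_html_report.py --sr_results_dir ./ --gt_dir ./results/GT --gt_extension jpg --img_ext jpg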






# Read the report title and description paragraphs from html_text.txt
with open('html_text.txt', 'r') as file1:
    lines = file1.readlines()

# Strip the newline character from every line
list_of_descriptions = [line.strip() for line in lines]
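# html_text.txt is assumed to hold at least three lines: the report title
# followed by two description paragraphs, consumed as <h1>, <p>, <p> below.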

# The per-loss results are expected under <sr_results_dir>/results/
sr_results_dir = os.path.join(sr_results_dir, 'results/')






# Every sub-directory of the results folder (other than GT) corresponds to one loss function
losses = []
for i in sorted(os.listdir(sr_results_dir)):
    if os.path.isdir(os.path.join(sr_results_dir, i)) and '.ipynb' not in i and 'GT' not in i and '.DS_Store' not in i:
        losses.append(i)
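# Assumed layout under <sr_results_dir>/results/ (names are illustrative):
#   results/
#     GT/                                       <- ground-truth images (see --gt_dir)
#     <loss_name>/<image>/<image>_SR.<img_ext>
#     <another_loss>/...
# Every loss folder is expected to contain the same set of image sub-folders;
# the image list is sampled from the first loss folder further down.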


soup = bs()
html = soup.new_tag("html")
body = soup.new_tag("body")
script = soup.new_tag("script")
script.attrs['src'] = "https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"
h1 = soup.new_tag("h1")
h1.append(list_of_descriptions[0])
p1 = soup.new_tag("p")
br = soup.new_tag("br")
p1.append(list_of_descriptions[1])

p2 = soup.new_tag("p")
p2.append(list_of_descriptions[2])
soup.append(html)
html.append(body)
body.append(script)
body.append(h1)
body.append(p1)
body.append(p2)
#body.append(br)

div_checkbox = soup.new_tag("div")
div_checkbox.attrs['method'] = "get"
div_checkbox.attrs['id'] = "form"
div_checkbox.attrs['value'] = "form"
for loss in losses:
    input_tagx = soup.new_tag("input")
    input_tagx.attrs['class'] = "checkBoxes"
    input_tagx.attrs['type'] = "checkbox"
    input_tagx.attrs['name'] = "Loss Function"
    input_tagx.attrs['id'] = loss
    input_tagx.attrs['value'] = loss
    input_tagx.append(loss)
    div_checkbox.append(input_tagx)
    div_checkbox.append(soup.new_tag("br"))
    
input_tagx = soup.new_tag("input")
input_tagx.attrs['id'] = "#toggle"
input_tagx.attrs['type'] = "submit"
input_tagx.attrs['onclick'] = "return ColumnDelete()"
input_tagx.attrs['value'] = "submit"
div_checkbox.append(input_tagx)

input_tagx = soup.new_tag("input")
input_tagx.attrs['id'] = "select_all"
input_tagx.attrs['type'] = "button"
input_tagx.attrs['name'] = "select_all"
input_tagx.attrs['onclick'] = "return toggle()"
input_tagx.attrs['value'] = "Select All"
div_checkbox.append(input_tagx)
body.append(div_checkbox)
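# Two inline scripts are generated next: toggle() flips every per-loss checkbox,
# and a jQuery document-ready handler binds the "Select All" button to the same
# checkboxes through their shared "checkBoxes" class.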
script = soup.new_tag("script")
script.append(""" function toggle() { """)
for loss in losses:
    # One JS statement per loss: flip that loss's checkbox
    script.append("""document.getElementById("{}").checked = !document.getElementById("{}").checked;\n""".format(loss, loss))
    
script.append(""" }  $(document).ready(function() {
           $('#select_all').click(function() {
               $('.checkBoxes').each(function() {
                   $(this).attr('checked',!$(this).attr('checked'));
               });
           });
       });""")
body.append(script)
script = soup.new_tag("script")
script.attrs['type'] = "text/javascript"
script.append("""

function ModifySpecificColumns(table_id, loss, remove) {
    elem = document.getElementById(table_id);
    for (i = 0; i < elem.childElementCount; i++) {

      var child = elem.children[i]
      for(j= 0; j < child.childElementCount; j++){
          var grandchild = child.children[j]
          if(grandchild.id == loss){
              if (remove){
                  grandchild.style.display="none"
              }
              else{
                  grandchild.style.display=""
              }
              
          }
      }
                
      }
}
""")

body.append(script)



# Generate CSS style

style = soup.new_tag("style")
style.attrs['type'] = "text/css"
style.append(auxiliary.generate_style())
body.append(style)


table_div = soup.new_tag("div")
table_div.attrs['class'] = "Table"
table_div.attrs['id'] = "Table"
folder_sampler_dir = os.path.join(sr_results_dir,losses[0])
images = [i for i in os.listdir(folder_sampler_dir) if os.path.isdir(os.path.join(folder_sampler_dir,i)) and '.ipynb' not in i]
### A heading row and an image row are generated below for every image in this list

heading_div = soup.new_tag("div")
heading_div.attrs['class'] = "Heading"
heading_div.attrs['id'] = "Heading"
div_list = []
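# Column headers: keep the raw folder name as the element id (used by the
# column-toggle JS) and decode it for display; the folder names are assumed to
# encode '+' as '7', '*' as 'x' and '.' as 'p'.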
for loss in losses:
    cell_div = soup.new_tag("div")
    cell_div.attrs['class'] = "Cell"
    cell_div.attrs['id'] = loss
    loss = loss.replace("7","+")
    loss = loss.replace("x","*")
    loss = loss.replace("p",".")
    cell_div.append(loss)
    div_list.append(cell_div)
    
cell_div = soup.new_tag("div")
cell_div.attrs['class'] = "Cell"
cell_div.attrs['id'] = "Ground Truth"
cell_div.append("Ground Truth")
div_list.insert(0, cell_div)

for div in div_list:
    heading_div.append(div)
table_div.append(heading_div)

row_div = soup.new_tag("div")
row_div.attrs['class'] = "Row"
row_div.attrs['id'] = "PSNR_Values"
div_list = []

#for div in div_list:
#    heading_div.append(div)
#table_div.append(heading_div)
#
#row_div = soup.new_tag("div")
#row_div.attrs['class'] = "Row"
#row_div.attrs['id'] = "SSIM_Values"
#div_list = []
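# First data row: the average PSNR per loss, with a label cell in the
# Ground Truth column.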

for index,loss in enumerate(losses):

    cell_div = soup.new_tag("div")
    cell_div.attrs['class'] = "Cell"
    cell_div.attrs['id'] = loss
    image_div = soup.new_tag("div")
    image_div.attrs['class'] = "image"
    image_div.attrs['id'] = loss
    img_div = soup.new_tag("p")
    
    img_div.append("PSNR: " + str(PSNR[index]))
    img_div.append(soup.new_tag("br"))
    #img_div.append("SSIM: "+str(SSIM[index]))
    image_div.append(img_div)
    cell_div.append(soup.new_tag("br"))
    cell_div.append(image_div)
    cell_div.append(soup.new_tag("br"))
    div_list.append(cell_div)

cell_div = soup.new_tag("div")
cell_div.attrs['class'] = "Cell"
cell_div.attrs['id'] = "Ground Truth"
image_div = soup.new_tag("div")
image_div.attrs['class'] = "image"
img_div = soup.new_tag("p")
img_div.append("Average metric score for the dataset: ")
image_div.append(img_div)
cell_div.append(soup.new_tag("br"))
cell_div.append(image_div)
cell_div.append(soup.new_tag("br"))
div_list.insert(0, cell_div)
for div in div_list:
    row_div.append(div)
table_div.append(row_div)
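# For every image, add a heading row (decoded loss names plus the original file
# name) followed by a row holding each loss's SR output and the ground-truth image.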


for image in images:
    heading_div = soup.new_tag("div")
    heading_div.attrs['class'] = "Heading"
    heading_div.attrs['id'] = "Heading"
    div_list = []
    for loss in losses:
        cell_div = soup.new_tag("div")
        cell_div.attrs['class'] = "Cell"
        cell_div.attrs['id'] = loss
        loss = loss.replace("7","+")
        loss = loss.replace("x","*")
        loss = loss.replace("p",".")
        cell_div.append(loss)
        div_list.append(cell_div)
    cell_div = soup.new_tag("div")
    cell_div.attrs['class'] = "Cell"
    cell_div.attrs['id'] = "Ground Truth"
    cell_div.append("Original image: "+' '+image+'.'+gt_extension)
    div_list.insert(0, cell_div)



    for div in div_list:
        heading_div.append(div)
    table_div.append(heading_div)
    
    
    row_div = soup.new_tag("div")
    row_div.attrs['class'] = "Row"
    row_div.attrs['id'] = image
    div_list = []
    for loss in losses:
        cell_div = soup.new_tag("div")
        cell_div.attrs['class'] = "Cell"
        cell_div.attrs['id'] = loss
        image_div = soup.new_tag("div")
        image_div.attrs['class'] = "image"
        image_div.attrs['id'] = loss
        img_div = soup.new_tag("img")
        img_div.attrs['src'] = os.path.join(sr_results_dir, loss, image, image + '_SR.' + img_ext)
        cell_div.append(soup.new_tag("br"))
        image_div.append(img_div)
        cell_div.append(image_div)
        div_list.append(cell_div)
    cell_div = soup.new_tag("div")
    cell_div.attrs['class'] = "Cell"
    cell_div.attrs['id'] = "Ground Truth"
    image_div = soup.new_tag("div")
    image_div.attrs['class'] = "image"
    img_div = soup.new_tag("img")
    img_div.attrs['src'] = os.path.join(gt_dir,image+'.'+gt_extension)
    cell_div.append(soup.new_tag("br"))
    image_div.append(img_div)
    cell_div.append(image_div)
    div_list.insert(0, cell_div)
    for div in div_list:
        row_div.append(div)
    table_div.append(row_div)

body.append(table_div)


script = soup.new_tag("script")
script.append(auxiliary.generate_on_mouse_events(gt_dir,gt_extension,sr_results_dir,img_ext))
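# ColumnDelete() is generated per loss: it reads each checkbox (whose id is the
# raw loss-folder name) and shows or hides the matching "Table" column through
# ModifySpecificColumns defined above.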

script.append(""" function ColumnDelete(){ \n""")
for loss in losses:
    script.append("""var {} = document.getElementById("{}").checked \n""".format(loss,loss))
    script.append(""" if ({}) {{
        ModifypecificColumns("Table","{}",false)
        }}
        else{{
            ModifypecificColumns("Table","{}",true)
        }} \n """.format(loss,loss,loss)
    )
script.append("""} """)
body.append(script)
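# The "Select All" document-ready handler relies on the jQuery library included
# at the top of the body; test.html can then be opened directly in a browser.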

# Write the finished report to disk
with open('test.html', 'w') as f:
    f.write(soup.prettify())

