from django.utils import timezone
from django.db import connection

from gauth.models import Location


def get_weeks(_year, _week):
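    """Return the last 10 [week, year] pairs ending at week _week of year _year, newest first.

    When fewer than 10 weeks have passed in the current year, the list is
    padded with the closing weeks of the previous year (assumed to end at
    week 52).
    """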
    if _week > 9:
        # The current year already has at least 10 weeks to report on.
        return [[w, _year] for w in range(_week, _week - 10, -1)]
    else:
        # Take the weeks available in the current year, then top up from the end
        # of the previous year, keeping the [week, year] ordering so the pairs
        # line up with the (week, year, ...) keys built from the SQL rows.
        this_year = [[w, _year] for w in range(_week, -1, -1)]
        this_year.extend([[w, _year - 1] for w in range(52, 52 - (10 - len(this_year)), -1)])
        return this_year


def weekly_google_positive_review_count():
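    """Count positive Google reviews (star_rating > 3) per location for the last 10 weeks.

    Returns (counts, labels): counts is a list of rows from get_weeks(), newest
    week first, each row holding one count per location; labels are the
    matching care_name values.
    """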
    sql = '''
            select
                week(create_time) as week,
                year(create_time) as year,
                location_id,
                count(*) as rev_count
            from review_review
            where
                create_time >= (curdate() - interval 10 week) and
                star_rating > 3
            group by year(create_time), week(create_time), location_id
            order by year desc, week desc;
           '''
    now = timezone.now()
    location_dict = dict(Location.objects.values_list('location_id', 'care_name'))
    with connection.cursor() as cursor:
        cursor.execute(sql)
        rows = cursor.fetchall()
        # Key each count by (week, year, location_id) so missing combinations
        # default to 0 below.
        row_dict = {(row[0], row[1], row[2]): row[3] for row in rows}
        # Build one row of counts per week (newest first), one column per location.
        res = []
        for w in get_weeks(now.year, now.isocalendar()[1]):
            res.append([row_dict.get(tuple(w + [loc]), 0) for loc in location_dict])
        return res, list(location_dict.values())


def weekly_google_negative_review_count():
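    """Count negative Google reviews (star_rating <= 3) per location for the last 10 weeks.

    Returns a list of rows from get_weeks(), newest week first, each row
    holding one count per location.
    """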
    sql = '''
            select
                week(create_time) as week,
                year(create_time) as year,
                location_id,
                count(*) as rev_count
            from review_review
            where
                create_time >= (curdate() - interval 10 week) and
                star_rating <= 3
            group by year(create_time), week(create_time), location_id
            order by year desc, week desc;
           '''
    now = timezone.now()
    location_dict = dict(Location.objects.values_list('location_id', 'care_name'))
    with connection.cursor() as cursor:
        cursor.execute(sql)
        rows = cursor.fetchall()
        # Key each count by (week, year, location_id) so missing combinations
        # default to 0 below.
        row_dict = {(row[0], row[1], row[2]): row[3] for row in rows}
        # Build one row of counts per week (newest first), one column per location.
        res = []
        for w in get_weeks(now.year, now.isocalendar()[1]):
            res.append([row_dict.get(tuple(w + [loc]), 0) for loc in location_dict])
        return res


def weekly_facebook_positive_review_count():
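    """Count positive Facebook reviews (recommendation_type = true) per page for the last 10 weeks.

    Returns (counts, labels): counts is a list of rows from get_weeks(), newest
    week first, each row holding one count per Facebook page; labels are the
    matching care_name values.
    """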
    sql = '''
            select
                week(create_time) as week,
                year(create_time) as year,
                page_id,
                count(*) as rev_count
            from facebook_app_facebookreview
            where
                create_time >= (curdate() - interval 10 week) and
                recommendation_type = true
            group by year(create_time), week(create_time), page_id
            order by year desc, week desc;
           '''
    now = timezone.now()
    # Keyed by the related Facebook page id; locations without a Facebook page
    # fall under a None key and therefore never match a page_id from the query.
    location_dict = dict(Location.objects.values_list('facebookpage__id', 'care_name'))
    with connection.cursor() as cursor:
        cursor.execute(sql)
        rows = cursor.fetchall()
        # Key each count by (week, year, page_id) so missing combinations
        # default to 0 below.
        row_dict = {(row[0], row[1], row[2]): row[3] for row in rows}
        # Build one row of counts per week (newest first), one column per page/location.
        res = []
        for w in get_weeks(now.year, now.isocalendar()[1]):
            res.append([row_dict.get(tuple(w + [loc]), 0) for loc in location_dict])
        return res, list(location_dict.values())


def weekly_facebook_negative_review_count():
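    """Count negative Facebook reviews (recommendation_type = false) per page for the last 10 weeks.

    Returns a list of rows from get_weeks(), newest week first, each row
    holding one count per Facebook page.
    """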
    sql = '''
            select
                week(create_time) as week,
                year(create_time) as year,
                page_id,
                count(*) as rev_count
            from facebook_app_facebookreview
            where
                create_time >= (curdate() - interval 10 week) and
                recommendation_type = false
            group by year(create_time), week(create_time), page_id
            order by year desc, week desc;
           '''
    now = timezone.now()
    # Keyed by the related Facebook page id; locations without a Facebook page
    # fall under a None key and therefore never match a page_id from the query.
    location_dict = dict(Location.objects.values_list('facebookpage__id', 'care_name'))
    with connection.cursor() as cursor:
        cursor.execute(sql)
        rows = cursor.fetchall()
        # Key each count by (week, year, page_id) so missing combinations
        # default to 0 below.
        row_dict = {(row[0], row[1], row[2]): row[3] for row in rows}
        # Build one row of counts per week (newest first), one column per page/location.
        res = []
        for w in get_weeks(now.year, now.isocalendar()[1]):
            res.append([row_dict.get(tuple(w + [loc]), 0) for loc in location_dict])
        return res


def weekly_positive_review_count():
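    """Combine Google and Facebook positive review counts per week per location.

    Assumes both sources yield matrices of the same weeks x locations shape,
    with columns in the same location order; the Google care_name labels are
    returned alongside the summed counts.
    """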
    google_counts, labels = weekly_google_positive_review_count()
    facebook_counts, _ = weekly_facebook_positive_review_count()
    # Element-wise sum over the weeks x locations matrices.
    result = [
        [google_counts[i][j] + facebook_counts[i][j] for j in range(len(google_counts[0]))]
        for i in range(len(google_counts))
    ]
    return result, labels


def weekly_negative_review_count():
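    """Combine Google and Facebook negative review counts per week per location.

    Assumes both matrices share the same weeks x locations shape with columns
    in the same location order.
    """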
    google_counts = weekly_google_negative_review_count()
    facebook_counts = weekly_facebook_negative_review_count()
    # Element-wise sum over the weeks x locations matrices.
    result = [
        [google_counts[i][j] + facebook_counts[i][j] for j in range(len(google_counts[0]))]
        for i in range(len(google_counts))
    ]
    return result
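

# Illustrative usage only (the view and template names below are hypothetical,
# not part of this module's API): the weekly counts plug straight into a chart
# context.
#
#     from django.shortcuts import render
#
#     def review_trend_view(request):
#         counts, labels = weekly_positive_review_count()
#         return render(request, "reviews/trend.html", {
#             "labels": list(labels),   # one label per location
#             "datasets": counts,       # one row per week, newest first
#         })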