import re
import time
import json
from requests import get, put, post
from gauth.auth_utils import get_gmb_id, get_auth_header
from .models import Review, Reply
from gauth.models import Location

from django.utils import timezone

# GMB account id for the authenticated account. NOTE: resolved once at import
# time, so importing this module performs an auth lookup as a side effect.
_, account_id = get_gmb_id()
# Maps the API's starRating enum strings to numeric star values (0-5).
STAR_REVIEW_NUM = {'STAR_RATING_UNSPECIFIED': 0, 'ONE': 1, 'TWO': 2, 'THREE': 3, 'FOUR': 4, 'FIVE': 5}
# Base endpoint of the Google My Business v4 API.
BASE_URL = f'https://mybusiness.googleapis.com/v4/'


def clean_comment(text):
    """Strip noise from a raw review comment.

    Applied in order: drop non-ASCII characters, remove a leading
    "(Google- ...)...()" machine-translation marker, trim leading
    newlines, then strip trailing whitespace.

    :param text: raw comment string from the API.
    :return: cleaned comment string.
    """
    substitutions = (
        (r'[^\x00-\x7F]+', ''),
        (r'^\(Google-\s*\)(.|\n|]\s)*\(\)', ''),
        (r'^\n*', ''),
    )
    for pattern, replacement in substitutions:
        text = re.sub(pattern, replacement, text)
    return text.rstrip()


def get_review_list_url(location_id, next_page_token=''):
    """Build the GMB list-reviews URL for one location.

    :param location_id: id of the location whose reviews are requested.
    :param next_page_token: optional pagination token from a prior response.
    :return: fully-qualified reviews endpoint URL.
    """
    return (
        f'{BASE_URL}accounts/{account_id}/locations/'
        f'{location_id}/reviews?pageToken={next_page_token}'
    )


def get_reply_url(location_id, review_id):
    """Build the GMB reply endpoint URL for a single review."""
    path = f'accounts/{account_id}/locations/{location_id}/reviews/{review_id}/reply'
    return BASE_URL + path


def reply_review(review, replied_text):
    """Publish a reply to a review via an HTTP PUT to the GMB API.

    :param review: Review object being replied to (supplies location_id
        and review_id).
    :param replied_text: text of the reply to post.
    :return: the requests Response from the API call.
    """
    body = json.dumps({'comment': replied_text})
    return put(
        get_reply_url(review.location_id, review.review_id),
        headers=get_auth_header(),
        data=body,
    )


def insert_review_into_database(reviews, loc_id):
    '''
    Insert or update reviews in the database.

    :param reviews: iterable of review dicts from the GMB reviews API.
    :param loc_id: location id the reviews belong to.
    :return: None. Creates a Review row per unseen reviewId and refreshes
        existing ones in place.
    '''
    for rev in reviews:
        review_id = rev.get('reviewId')
        # EAFP upsert: reuse the existing row if we have seen this review.
        try:
            review = Review.objects.get(pk=review_id)
        except Review.DoesNotExist:
            review = Review(review_id=review_id)
        review.comment = rev.get('comment')
        review.create_time = rev.get('createTime')
        review.update_time = rev.get('updateTime')
        review.star_rating = STAR_REVIEW_NUM[rev.get('starRating')]
        reviewer = rev.get('reviewer')
        review.reviewer_name = reviewer.get('displayName')
        review.reviewer_photo = reviewer.get('profilePhotoUrl')
        review.location_id = loc_id
        review_reply = rev.get('reviewReply')
        # Attach the owner's reply when the API reports one.
        if review_reply:
            # get_or_create replaces the race-prone filter().first()/create()
            # pair with the idiomatic single call; same de-dup semantics.
            # NOTE(review): 'create_time' is filled from the reply's
            # updateTime (API exposes no reply createTime) — confirm intended.
            reply, _created = Reply.objects.get_or_create(
                replied_text=review_reply.get('comment'),
                create_time=review_reply.get('updateTime'),
            )
            review.reply = reply
        else:
            review.reply = None
        review.save()


def sync_all_review(loc_id):
    '''
    Sync every review page for a location (safe to re-run after a network break).

    :param loc_id: Location id of a particular location.
    :return: None. Upserts all reviews for this location page by page.
    '''
    next_page_token = ''
    headers = get_auth_header()
    auth_retries = 0
    while True:
        url = get_review_list_url(loc_id, next_page_token)
        # Throttle between page fetches to stay under the API rate limit.
        time.sleep(5)
        res = get(url, headers=headers)
        if res.status_code == 401:
            # Token likely expired: refresh and retry the same page — but
            # bounded, so a permanently dead credential cannot make the
            # original refresh-and-continue loop spin forever.
            auth_retries += 1
            if auth_retries > 3:
                break
            headers = get_auth_header()
            continue
        auth_retries = 0
        data = res.json()
        reviews = data.get('reviews')
        if reviews:
            insert_review_into_database(reviews, loc_id)
        next_page_token = data.get('nextPageToken')
        if next_page_token is None:
            break


def fetch_last_20_reviews(loc_id, page_size=20):
    '''
    Fetch the most recent reviews for a location and store them.

    :param loc_id: location id to fetch reviews for.
    :param page_size: number of reviews to request (default 20).
    :return: None. Upserts whatever reviews the API returns.
    '''
    headers = get_auth_header()
    url = get_review_list_url(loc_id) + '&pageSize=' + str(page_size)
    res = get(url, headers=headers)
    data = res.json()
    reviews = data.get('reviews')
    # BUG FIX: 'reviews' is absent (None) when a location has no reviews,
    # which made len(reviews) raise TypeError. Truthiness covers None and [].
    if reviews:
        insert_review_into_database(reviews, loc_id)


def store_batch_of_reviews(reviews):
    '''
    Persist reviews returned by the batchGetReviews endpoint.

    :param reviews: list of locationReview dicts; each carries a 'name'
        ("accounts/<acc_id>/locations/<loc_id>") and a nested 'review'.
    :return: None. Creates or updates one Review row per entry.
    '''
    for entry in reviews:
        # The location id is the last path segment of the resource name.
        location_id = entry.get('name').split('/')[-1]
        rev = entry.get('review')

        review_id = rev.get('reviewId')
        # EAFP upsert: reuse the existing row if we have seen this review.
        try:
            review = Review.objects.get(pk=review_id)
        except Review.DoesNotExist:
            review = Review(review_id=review_id)
        comment = rev.get('comment')
        if comment:
            # Rating-only reviews carry no comment; only overwrite when one
            # is present, after stripping translation markers / non-ASCII.
            review.comment = clean_comment(comment)
        review.create_time = rev.get('createTime')
        review.update_time = rev.get('updateTime')
        review.star_rating = STAR_REVIEW_NUM[rev.get('starRating')]
        reviewer = rev.get('reviewer')
        review.reviewer_name = reviewer.get('displayName')
        review.reviewer_photo = reviewer.get('profilePhotoUrl')
        review.location_id = location_id
        review_reply = rev.get('reviewReply')
        # Attach the owner's reply when the API reports one.
        if review_reply:
            # get_or_create replaces the race-prone filter().first()/create()
            # pair with the idiomatic single call; same de-dup semantics.
            reply, _created = Reply.objects.get_or_create(
                replied_text=review_reply.get('comment'),
                create_time=review_reply.get('updateTime'),
            )
            review.reply = reply
        else:
            review.reply = None
        review.save()


def fetch_batch_of_reviews():
    """Pull the latest reviews for every stored location in one batch call.

    POSTs all location names to the GMB ``locations:batchGetReviews``
    endpoint and hands the payload to ``store_batch_of_reviews``.
    Returns None whether or not the request succeeds.
    """
    headers = get_auth_header()
    url = f'{BASE_URL}accounts/{account_id}/locations:batchGetReviews'
    # Location names must be fully qualified, e.g.
    # "accounts/103266181421855655295/locations/8918455867446117794".
    location_names = [
        f'accounts/{account_id}/locations/{loc.location_id}'
        for loc in Location.objects.all()
    ]
    # The endpoint also accepts pageSize, pageToken, orderBy (updateTime
    # desc by default) and ignoreRatingOnlyReviews; we rely on defaults.
    payload = json.dumps({'locationNames': location_names})
    response = post(url, headers=headers, data=payload)
    if response.status_code != 200:
        return None
    data = response.json()
    store_batch_of_reviews(data.get('locationReviews'))


def populate_reviews():
    """Run a full review sync for every location and print the elapsed time."""
    started = timezone.now()
    for entry in Location.objects.all().values('location_id'):
        sync_all_review(entry.get('location_id'))
    elapsed = timezone.now() - started
    minutes, secs = divmod(elapsed.seconds, 60)
    print(f'Elapsed time: {minutes} minutes and {secs} secs.')


def get_bad_reviews(location_id, **kwargs):
    '''
    Return all reviews for a location rated three stars or lower that were
    updated within the given time window.

    :param location_id: str -> id of the location the reviews belong to.
    :param kwargs: timedelta keyword arguments, i.e. (days=__, hours=__, minutes=__).
    :return: QuerySet -> all low-rating reviews updated in the last * days/hours/minutes.

    Example --------------
    >>> get_bad_reviews(location_id='123456', days=5, hours=2, minutes=1)
    >>> get_bad_reviews(location_id='123456', days=5)
    >>> get_bad_reviews(location_id='123456', hours=5)
    '''
    now = timezone.now()
    # Window start: now minus the supplied timedelta components.
    date = now - timezone.timedelta(**kwargs)
    reviews = Review.objects.filter(location_id=location_id, update_time__gte=date, star_rating__lte=3)
    return reviews