import json
import re
from dotenv import dotenv_values
import bottle
from bottle import run, post, get, request, response, template
# from werkzeug.middleware.profiler import ProfilerMiddleware


from gogyup.words_api import WordsApi
import bottle_pymysql
from bottle_cors_plugin import cors_plugin
from gogyup.openai_tools import OpenAiTranslator

# os.environ['KMP_DUPLICATE_LIB_OK']='True'

# Application configuration, loaded once at import time from the local .env file.
config = dotenv_values('.env')
# Feature flags: values in .env are strings, so coerce to int (0 = off).
# NOTE(review): an empty-string value (e.g. `ENABLE_PREDICTIONS=`) would make
# int() raise at import time — confirm .env always sets numeric values.
ENABLE_PREDICTIONS = int(config.get('ENABLE_PREDICTIONS', 0))
DISABLE_LOGGING = int(config.get('DISABLE_LOGGING', 0))

# The prediction model is only imported/constructed when enabled; in the
# 'local' environment a mock stands in for the real SmartDictionary.
# Handlers must therefore guard on ENABLE_PREDICTIONS before touching `smart`.
if ENABLE_PREDICTIONS:
    if config.get('APP_ENV') == 'local':
        from gogyup.mocks import SmartDictionary
    else:
        from gogyup.smart_dictionary import SmartDictionary
    smart = SmartDictionary()


# Dictionary backend; both paths come from .env and may be None if unset.
words_api = WordsApi(path=config.get('WORDS_API_PATH'), path_extra=config.get('WORDS_EXTRA_PATH'))


@get('/translate')
def translate():
    """Render the 'translate' page template for the interactive demo UI."""
    return template('translate')

@post('/api/predict')
def predict_definitions():
    """
    Rank candidate definitions for a word at a given position in a context.

    Expects a JSON body with required keys 'context', 'targetPosition' and
    'candidateDefinitions', plus an optional truthy 'sort' flag to request
    sorted output.  Returns the predictor's result as a JSON body, or a
    400 error if predictions are disabled or a required field is missing.
    """
    if not ENABLE_PREDICTIONS:
        # `smart` is only constructed when predictions are enabled (see module
        # bootstrap); without this guard the call below raises NameError (500).
        return json_error('Predictions are not enabled.')
    data = request.json or {}
    try:
        context = data['context']
        target_position = data['targetPosition']
        candidate_definitions = data['candidateDefinitions']
    except KeyError as missing:
        # Surface a malformed body as a client error instead of a 500.
        return json_error('Missing required field: {}'.format(missing.args[0]))
    sort_defs = bool(data.get('sort'))
    predictions = smart.predict(context, target_position, candidate_definitions, sort_defs)
    return json_response(predictions)


@get('/api/define')
def define(pymydb):
    """
    Look up dictionary definitions, optionally ranked by a prediction model.

    Either a word or a context (or both) must be provided.
    If no context is given, then an unsorted list of definitions will be returned.
    If no word is provided, then a context and offset must be provided.
    If a starting position is provided, then a context must also be provided.
    If a part of speech is provided then results will be filtered by part of speech.

    Query parameters: word, context, pos, start, sort, predict, nolog, limit.
    Returns a JSON envelope {status, offset, count, total, results} on success,
    or a 400 JSON error describing the validation failure.
    """
    target_word = request.query.word
    # Strip a possessive suffix so e.g. "dog's" resolves to "dog".
    if target_word.endswith("'s"):
        target_word = target_word[:-2]
    context = request.query.context
    part_of_speech = request.query.pos
    start = int(request.query.start or -1)  # -1 means "not provided"
    # offset = int(request.query.offset or 0)
    offset = 0
    sort_ = int(request.query.sort or 1)
    # Predictions require both the feature flag and an explicit request.
    predict = ENABLE_PREDICTIONS and int(request.query.predict or 0)
    no_log = DISABLE_LOGGING or int(request.query.nolog or 0)
    try:
        limit = int(request.query.limit)
    except ValueError:  # absent (empty string) or non-numeric ?limit=
        limit = 10
    # NOTE(review): `limit` is parsed but never applied below — confirm intent.

    # Log request (best-effort analytics; skipped when logging is disabled).
    if no_log == 0:
        ip_address = request.headers.get('Referer')
        log_definition_request(pymydb, target_word, context, part_of_speech, start, offset, sort_, predict, ip_address)

    # Validate request.
    if not (target_word or context):
        return json_error("Either a word or a context (or both) must be provided.")

    if predict and not context:
        return json_error("A context is required to make predictions.")

    if start != -1 and not context:
        return json_error("If a starting position is provided, then a context must also be provided.")

    # (A "neither word nor context" re-check here was unreachable: the first
    # validation above already returned in that case.)

    if context and ENABLE_PREDICTIONS:
        # Locate the target word inside the context, or resolve it from
        # `start` when no word was supplied.
        for match in re.finditer(r"\w+['\w]*\b", context):
            word = match.group(0)
            if start == -1:
                # No start given: take the first case-insensitive match.
                if word.lower() == target_word.lower():
                    start = match.start()
                    break
                continue
            if match.start() > start:
                # We passed the requested offset without landing on a token.
                return json_error("Word not found in context.")
            if match.start() == start:
                if not target_word:
                    target_word = word
                elif target_word.lower() != word.lower():
                    return json_error("Word not found in context")
                break
        else:
            # Loop exhausted without a break: no token matched.
            return json_error("Word not found in context.")
    elif not target_word:
        return json_error("If no word is provided, then a context and offset must be provided.")

    results = words_api.lookup(target_word, part_of_speech, offset)

    if predict and len(results) > 1:
        # Re-tokenize the context to find the target's token index, then let
        # the model score each candidate definition.
        candidate_definitions = [(r['lemma'], r['definition']) for r in results]
        parts_of_speech = [r['partOfSpeech'] for r in results]
        target_position = None
        context_list = []
        for i, match in enumerate(re.finditer(r"\w+['\w]*\b", context)):
            word = match.group(0)
            context_list.append(word)
            if word.lower() == target_word.lower() and match.start() == start:
                target_position = i

        results = smart.predict(context_list, target_position, candidate_definitions, parts_of_speech, sort_)

    # Normalize rows into the public response shape (one sense per entry).
    results = [
        {
            'datasets': ['wordsApi'],
            'headword': r['lemma'],
            'part_of_speech': r['partOfSpeech'],
            'senses': [{
                'definition': r['definition'],
                'probability': r.get('probability') if predict else None,
            }],
        } for r in results
    ]
    output = {
        'status': 200,
        'offset': offset,
        'count': len(results),
        'total': len(results),
        'results': results,
    }
    return json_response(output)


def log_definition_request(pymydb, target_word, context, part_of_speech, start, offset, sort_, predict, ip_address):
    """Insert one row describing an /api/define request into DefinitionRequests."""
    sql = (
        'INSERT INTO `DefinitionRequests` (`target_word`, `context`, `part_of_speech`, `start`, `offset`,'
        ' `sort`, `predict`, `ip_address`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)'
    )
    params = (target_word, context, part_of_speech, start, offset, sort_, predict, ip_address)
    pymydb.execute(sql, params)


"""
Database operations:

  * Get lemma by id
  * Get lemma by text
  * Add lemma
  * Update lemma
  * Delete lemma
  * Get definition by id
  * Get all definitions by id
  * Get all definitions for lemma
  * Add definition
  

  
GET /api/extra/lemma/


"""


@get('/api/translate')
def translate():
    """
    Translate a word, as used in a context sentence, into a target language.

    Query parameters (all required): word, context, language.
    Returns the OpenAI 'choices' list (each text whitespace-stripped) as JSON;
    400 error when a parameter is missing or the translation call fails.

    NOTE(review): this definition shadows the page handler also named
    `translate` earlier in the module. Bottle registered that route at
    decoration time so both URLs still work, but the duplicate name should
    be resolved.
    """
    word = request.query.word
    context = request.query.context
    language = request.query.language
    if not (word and context and language):
        return json_error('Required parameters: word, context, language')

    translator = OpenAiTranslator()
    translator.translate(word, context, language)
    res = translator.response
    # Guard against a missing/malformed response before touching 'choices'.
    if not (res and 'choices' in res and isinstance(res['choices'], list) and len(res['choices']) > 0):
        return json_error('Translation failed')
    choices = res['choices']
    for choice in choices:
        if 'text' in choice and isinstance(choice['text'], str):
            choice['text'] = choice['text'].strip()
    return json_response(choices)


@get('/api/extra/define/<lemma>')
def define_extra(lemma, pymydb):
    """Look up extra definitions for *lemma*. Placeholder: not implemented yet."""
    return json_error('Not implemented')


@get('/api/extra/define_extra_by_id/<lemma_id>')
def define_extra_by_id(lemma_id, pymydb):
    """Look up extra definitions by lemma id. Placeholder: not implemented yet."""
    return json_error('Not implemented')


@get('/api/pokemon/<pokemon_id>')
def pokemon(pokemon_id, pymydb):
    """
    Fetch a pokemon row by numeric id and return it as JSON.

    Non-numeric ids raise ValueError (surfaced as a 500 by Bottle).
    """
    pokemon_id = int(pokemon_id)
    # The placeholder must not be quoted: the driver quotes the bound value
    # itself, so id="%s" would compare the column against the doubly-quoted
    # string '<n>' instead of the number and match nothing.
    pymydb.execute('SELECT * FROM pokemon WHERE id=%s', (pokemon_id,))
    rows = pymydb.fetchall()
    return json_response(rows)


def json_response(data):
    """Serialize *data* as a JSON body with HTTP status 200."""
    response.status = 200
    response.headers['Content-Type'] = 'application/json'
    return json.dumps(data)


def json_error(data):
    """
    Serialize an error payload as JSON with HTTP status 400.

    A bare string is wrapped as {'error': <message>}; any other payload is
    serialized as-is.
    """
    payload = {'error': data} if isinstance(data, str) else data
    response.headers['Content-Type'] = 'application/json'
    response.status = 400
    return json.dumps(payload)


# MySQL plugin: injects a `pymydb` cursor argument into any route handler
# that declares it (see define(), pokemon(), etc.). Credentials come from .env.
plugin = bottle_pymysql.Plugin(
    dbhost=config.get('DB_HOST'),
    dbuser=config.get('DB_USER'),
    dbpass=config.get('DB_PASS'),
    dbname=config.get('DB_NAME'))

def _build_application():
    """Create the Bottle WSGI app with the MySQL and CORS plugins installed."""
    app = bottle.default_app()
    app.install(plugin)
    app.install(cors_plugin('*'))
    # app = ProfilerMiddleware(app, profile_dir='profiles')
    return app


# Both entry paths — the dev server (`python app.py`) and a WSGI import —
# need the same configured `application`; only the former starts Bottle's
# built-in server.
print('Name = {}'.format(__name__))
application = _build_application()
if __name__ == '__main__':
    run(application, host='localhost', port=8080, debug=True, reloader=True)
    # run(host='localhost', port=8080, debug=True, plugins=[plugin])