extends Node

# Shared HTTP headers (User-Agent / Accept) for every Scryfall request;
# populated once in _init().
var _req_headers: PackedStringArray

# In-memory copy of the parsed bulk card list from user://bulk.json;
# loaded by _setup_cache_in_mem().
var _bulk_data: Array

# Emitted when a download finishes; _emitted_done counts the emissions.
signal fetch_done
var _emitted_done = 0
# Emitted when a download starts; _emitted_start counts the emissions.
signal fetch_start
var _emitted_start = 0

# Project constants script (APP_NAME, APP_VERSION) used to build the User-Agent.
var _consts = preload("res://data/consts.gd")


func _all_downloads_done() -> bool:
	## True when every download that signalled fetch_start has also
	## signalled fetch_done.
	var pending := _emitted_start - _emitted_done
	return pending == 0


func _setup_cache_in_mem():
	## Loads user://bulk.json into _bulk_data for in-memory searching.
	## Logs an error and leaves _bulk_data untouched when the file cannot
	## be opened or its contents do not parse to an Array.
	var file = FileAccess.open("user://bulk.json", FileAccess.READ)
	if file == null:
		push_error(_cache_error("FILE_OPEN") + "Could not open user://bulk.json.")
		return
	var parsed = JSON.parse_string(file.get_as_text())
	file.close()
	# JSON.parse_string returns null on malformed input; guard before the
	# Array-typed assignment, which would otherwise raise at runtime.
	if parsed is Array:
		_bulk_data = parsed
	else:
		push_error(_cache_error("PARSING") + "user://bulk.json did not parse to an Array.")


func setup() -> Error:
	## Prepares the cache layer: verifies the bulk data file exists and that
	## all pending downloads finished, then caches mana symbols and loads
	## the in-memory bulk list. Returns FAILED while prerequisites are
	## missing, OK otherwise.
	var bulk_missing = not FileAccess.file_exists("user://bulk.json")
	if bulk_missing:
		# Fire-and-forget: get_bulk_data is a coroutine and keeps running
		# in the background after this early return.
		get_bulk_data(false)
		push_error("Bulk Data was not downloaded! Downloading now!")
		return FAILED

	if not _all_downloads_done():
		push_error("Not done downloading Bulk Data.")
		return FAILED

	_fetch_mana_symbols()
	_setup_cache_in_mem()
	return OK


func _init() -> void:
	## Builds the shared Scryfall request headers and wires the download
	## counters to the fetch_start / fetch_done signals.
	var agent = "User-Agent: " + _consts.APP_NAME + "/" + _consts.APP_VERSION
	_req_headers = PackedStringArray([agent, "Accept: */*"])

	fetch_start.connect(_on_start_emit)
	fetch_done.connect(_on_end_emit)
	


func _on_start_emit() -> void:
	## Tallies a download that has just begun (fetch_start handler).
	_emitted_start = _emitted_start + 1


func _on_end_emit() -> void:
	## Tallies a download that has just completed (fetch_done handler).
	_emitted_done = _emitted_done + 1


func has_emitted_all() -> bool:
	## Public alias for _all_downloads_done(): true when every started
	## download has signalled completion. Delegates instead of duplicating
	## the comparison so the two can never drift apart.
	return _all_downloads_done()


func _cache_error(err: String) -> String:
	## Builds the "CACHE::ERROR::<tag>\n" prefix used by push_error calls
	## throughout this node.
	return "CACHE::ERROR::%s\n" % err


func _get_dict_from_file(filepath: String) -> Dictionary:
	## Reads filepath and parses its contents as JSON.
	## Returns an empty Dictionary when the file cannot be opened or the
	## contents do not parse to a Dictionary (the original would have
	## raised at runtime in both cases).
	var file = FileAccess.open(filepath, FileAccess.READ)
	if file == null:
		push_error(_cache_error("FILE_OPEN") + "Could not open " + filepath + ".")
		return {}
	var data = JSON.parse_string(file.get_as_text())
	file.close()
	if data is Dictionary:
		return data
	push_error(_cache_error("PARSING") + filepath + " did not parse to a Dictionary.")
	return {}


## get_card_data_from_name
##
## _name: String [br]
## A wrapper for searching for a card by name. Use **get_card_data_from_id** where possible, as it avoids an expensive search for the new card, if the card has been cached already.
## Returns an empty Dictionary when no card matches.
func get_card_data_from_name(_name: String) -> Dictionary:
	var entry = _search_results_name(_name)
	# A failed search returns {}; indexing it in _get_card_data_from_bulk
	# would raise, so short-circuit here (the search already push_error'd).
	if entry.is_empty():
		return {}
	return _get_card_data_from_bulk(entry)


## get_card_data_from_id
##
## id: String [br]
## This is the preferred wrapper to use when fetching card data, it checks the cache for preexisting data and uses that if it's available. Otherwise, it will search the bulk json for the data.
## Returns an empty Dictionary when the id matches no card.
func get_card_data_from_id(id: String) -> Dictionary:
	var cache_path = "user://card_cache/" + id + "/card.json"
	if FileAccess.file_exists(cache_path):
		return _get_dict_from_file(cache_path)

	var entry = _search_results_generic("id", id)
	# A failed search returns {}; guard before _get_card_data_from_bulk
	# indexes it.
	if entry.is_empty():
		return {}
	return _get_card_data_from_bulk(entry)


func _search_results_name(search_query: String) -> Dictionary:
	## Linear scan of _bulk_data for a card whose (front-face) name equals
	## search_query. Art-series entries are skipped. Returns the matching
	## entry, or {} when nothing matches.
	for entry in _bulk_data:
		if entry["layout"] == "art_series":
			continue
		var entry_name: String = entry["name"]
		# Multi-faced cards are named "Front // Back" — compare the front
		# face only. left(find) keeps everything before "//", strip_edges
		# drops the separating space without assuming it is present (the
		# original left(find - 1) silently ate a character when it wasn't).
		var split_at = entry_name.find("//")
		if split_at != -1:
			entry_name = entry_name.left(split_at).strip_edges()
		if entry_name == search_query:
			return entry
	push_error("Could not find desired card {" + search_query + "}")
	return {}


func _search_results_generic(field: String, search_query: String) -> Dictionary:
	## Linear scan of _bulk_data for the first non-art-series entry whose
	## `field` value equals search_query. Returns the whole entry, or {}
	## when nothing matches.
	for entry in _bulk_data:
		if entry["layout"] == "art_series":
			continue
		if entry[field] == search_query:
			# BUG FIX: previously returned entry[field] (a single field
			# value, e.g. the id string), violating the Dictionary return
			# type and breaking get_card_data_from_id.
			return entry

	push_error("Could not find desired card {" + search_query + "}")
	return {}


func _get_card_data_from_bulk(dict_entry: Dictionary) -> Dictionary:
	## Persists a bulk-search result to user://card_cache/<id>/card.json,
	## kicking off the card image download first (fire-and-forget
	## coroutine). Passes dict_entry through unchanged so callers can chain
	## on the return value; an empty dict (failed search) is returned as-is.
	if dict_entry.is_empty():
		return dict_entry

	if dict_entry["image_status"] != "missing":
		_fetch_card_img(dict_entry)

	var dir = DirAccess.open("user://")
	dir.make_dir_recursive("user://card_cache/" + dict_entry["id"] + "/")
	dir = null

	var file = FileAccess.open(
		"user://card_cache/" + dict_entry["id"] + "/card.json", FileAccess.WRITE
	)
	if file == null:
		push_error(_cache_error("FILE_OPEN") + "Could not write card cache for " + dict_entry["id"] + ".")
		return dict_entry
	file.store_line(JSON.stringify(dict_entry, "\t"))
	file.close()

	print("Card: " + dict_entry["name"] + " (" + dict_entry["id"] + ") found, and cached.")

	return dict_entry

func _get_mana_img(symbol: String, img_url: String) -> Error:
	## Downloads one mana-symbol SVG from img_url, rasterizes it (20x20 for
	## the standard 100x100 renders) and saves it as a PNG under
	## res://symbol_cache/. fetch_start/fetch_done are balanced on every
	## exit path so the download counters stay consistent.
	# The file is saved with a sanitized name and a .png extension — check
	# for exactly that file (the original looked for "<symbol>.svg", so the
	# cache never hit and every symbol re-downloaded).
	var sanitized = symbol.replace("/", "-").replace("{", "").replace("}", "")
	var out_path = "res://symbol_cache/" + sanitized + ".png"
	if FileAccess.file_exists(out_path):
		return OK

	# Emit only once we know real work happens, so a cache hit does not
	# leave an unmatched fetch_start.
	fetch_start.emit()

	var httpr = HTTPRequest.new()
	add_child(httpr)

	var err = httpr.request(img_url, _req_headers)
	if err != OK:
		push_error(_cache_error("GET_REQUEST") + "An error occured in the Scryfall request.")
		httpr.queue_free()
		fetch_done.emit()
		return FAILED
	var resp = await httpr.request_completed
	# Free the transient request node so it doesn't accumulate as a child.
	httpr.queue_free()

	var img = Image.new()
	err = img.load_svg_from_buffer(resp[3])
	if err != OK:
		push_error(_cache_error("IMG_LOADING") + "Couldn't load the image.")
		fetch_done.emit()
		return FAILED

	# Scryfall serves the plain symbols at 100x100; shrink those for UI use.
	if img.get_size() == Vector2i(100, 100):
		print("resizing")
		img.resize(20, 20, Image.INTERPOLATE_LANCZOS)

	img.save_png(out_path)
	img = null

	fetch_done.emit()

	return OK


func _fetch_mana_symbols() -> Error:
	## Downloads Scryfall's symbology list and caches every mana symbol as
	## a PNG, plus a symbol -> path lookup written to
	## res://symbol_cache/symbols.json. Skips all work when the cache
	## directory already exists.
	var mana_symbols: Dictionary = Dictionary()
	if DirAccess.dir_exists_absolute("res://symbol_cache"):
		return OK
	else:
		DirAccess.make_dir_absolute("res://symbol_cache")

	var httpr = HTTPRequest.new()
	add_child(httpr)

	var err = httpr.request("https://api.scryfall.com/symbology", _req_headers)
	if err != OK:
		push_error(_cache_error("GET_REQUEST") + "An error occured in the Scryfall request.")
		httpr.queue_free()
		return FAILED
	var resp = await httpr.request_completed
	# Free the transient request node so it doesn't accumulate as a child.
	httpr.queue_free()

	var unprocessed_body = resp[3].get_string_from_utf8()
	var json_body = JSON.parse_string(unprocessed_body)
	# parse_string returns null on malformed input; the original would have
	# crashed indexing json_body["data"].
	if json_body == null:
		push_error(_cache_error("PARSING") + "Failed to parse the Scryfall symbology results.")
		return FAILED
	for icon in json_body["data"]:
		err = await _get_mana_img(icon["symbol"], icon["svg_uri"])
		if err != OK:
			push_error("Couldn't fetch mana symbol " + icon["symbol"])
		mana_symbols[icon["symbol"]] = "res://symbol_cache/" + icon["symbol"].replace("/", "-").replace("{", "").replace("}", "") + ".png"
		print(icon["symbol"] + " image cached.")

	var file = FileAccess.open("res://symbol_cache/symbols.json", FileAccess.WRITE)
	if file == null:
		push_error(_cache_error("FILE_OPEN") + "Could not write symbols.json.")
		return FAILED
	file.store_line(JSON.stringify(mana_symbols))
	file.close()

	print("Done caching mana symbols.")
	return OK

func _fetch_card_img(data: Dictionary) -> Error:
	## Downloads the card's PNG image and saves it to
	## user://card_cache/<id>/card.png. fetch_start/fetch_done are balanced
	## on every exit path so the download counters stay consistent.
	# Check the exact path the image is saved under (the original check was
	# missing the "/" before card.png, so the cache never hit).
	var out_path = "user://card_cache/" + data["id"] + "/card.png"
	if FileAccess.file_exists(out_path):
		return OK

	# Emit only once we know real work happens, so a cache hit does not
	# leave an unmatched fetch_start.
	fetch_start.emit()

	var httpr = HTTPRequest.new()
	add_child(httpr)

	var err = httpr.request((data["image_uris"])["png"], _req_headers)
	if err != OK:
		push_error(_cache_error("GET_REQUEST") + "An error occured in the Scryfall request.")
		httpr.queue_free()
		fetch_done.emit()
		return FAILED
	var resp = await httpr.request_completed
	# Free the transient request node so it doesn't accumulate as a child.
	httpr.queue_free()

	var img = Image.new()
	err = img.load_png_from_buffer(resp[3])
	if err != OK:
		push_error(_cache_error("IMG_LOADING") + "Couldn't load the image.")
		fetch_done.emit()
		return FAILED

	var dir = DirAccess.open("user://")
	dir.make_dir_recursive("user://card_cache/" + data["id"] + "/")
	dir = null

	img.save_png(out_path)
	img = null

	fetch_done.emit()

	return OK


func get_bulk_data(force: bool) -> Error:
	## Downloads Scryfall's unique-artwork bulk data into user://bulk.json.
	## When force is true an existing file is deleted and re-downloaded;
	## otherwise an existing file is kept and OK is returned immediately.
	## fetch_start/fetch_done are balanced on every exit path.
	if FileAccess.file_exists("user://bulk.json"):
		if force:
			DirAccess.remove_absolute("user://bulk.json")
		else:
			return OK

	# BUG FIX: the original emitted fetch_done at the end without a
	# matching fetch_start, permanently unbalancing the download counters
	# (_all_downloads_done() could never return true again).
	fetch_start.emit()

	var httpr = HTTPRequest.new()
	add_child(httpr)

	var error = httpr.request("https://api.scryfall.com/bulk-data/unique-artwork", _req_headers)
	if error != OK:
		push_error(_cache_error("GET_REQUEST") + "An error occurred in the Scryfall request.")
		httpr.queue_free()
		fetch_done.emit()
		return FAILED

	var response = await httpr.request_completed
	if response[0] != HTTPRequest.RESULT_SUCCESS:
		push_error(_cache_error("GET_REQUEST") + "Failed to fetch card data from Scryfall")
		httpr.queue_free()
		fetch_done.emit()
		return FAILED

	var unprocessed_body = response[3].get_string_from_utf8()
	var card_content = JSON.parse_string(unprocessed_body)
	if card_content == null:
		push_error(_cache_error("PARSING") + "Failed to parse the Scryfall card results.")
		httpr.queue_free()
		fetch_done.emit()
		return FAILED

	# The first response is only a pointer record; follow download_uri for
	# the actual bulk payload.
	error = httpr.request(card_content["download_uri"], _req_headers)
	if error != OK:
		push_error(_cache_error("GET_REQUEST") + "An error occurred in the Scryfall request.")
		httpr.queue_free()
		fetch_done.emit()
		return FAILED

	response = await httpr.request_completed
	# Free the transient request node so it doesn't accumulate as a child.
	httpr.queue_free()
	if response[0] != HTTPRequest.RESULT_SUCCESS:
		push_error(_cache_error("GET_REQUEST") + "Failed to fetch card data from Scryfall")
		fetch_done.emit()
		return FAILED

	unprocessed_body = response[3].get_string_from_utf8()
	card_content = JSON.parse_string(unprocessed_body)
	if card_content == null:
		push_error(_cache_error("PARSING") + "Failed to parse the Scryfall card results.")
		fetch_done.emit()
		return FAILED

	var data_cache = FileAccess.open("user://bulk.json", FileAccess.WRITE)
	if data_cache == null:
		push_error(_cache_error("FILE_OPEN") + "Could not write user://bulk.json.")
		fetch_done.emit()
		return FAILED
	data_cache.store_string(unprocessed_body)
	data_cache.close()

	fetch_done.emit()

	return OK


func _notification(what):
	## Warns when this cache node is being torn down while downloads are
	## still in flight.
	if what != NOTIFICATION_PREDELETE:
		return
	if _all_downloads_done():
		return
	push_error(
		"ERR::MEM::CACHE\nCache being deleted before all threads have finished processing!"
	)