extends Node
## The generator works with pure numbers; you'll need something else to convert them to actual tiles.

## Entropy for tiles far away from activity shouldn't be recalculated, so cache it.
var entropy_valid: Array[Array] = [] ## Valid options for each tile
var entropy_total: PackedInt32Array = [] ## Total number of options; negative means unknown


func _reset_entropy() -> void:
	entropy_valid.clear()
	entropy_valid.resize(Chunk.CHUNK_WIDTH * Chunk.CHUNK_HEIGHT)
	entropy_valid.fill([])
	entropy_total.clear()
	entropy_total.resize(Chunk.CHUNK_WIDTH * Chunk.CHUNK_HEIGHT)
	entropy_total.fill(-1)


func _update_entropy(chunk: Chunk, samples: Array[PackedInt32Array]) -> void:
	# Iterate over indeterminate tiles to find their valid neighbours
	for x in range(Chunk.CHUNK_WIDTH):
		for y in range(Chunk.CHUNK_HEIGHT):
			var index: int = y * Chunk.CHUNK_WIDTH + x
			# Reduce number crunching
			if chunk.data[index] > 0: # If the tile is already set, ignore it
				entropy_total[index] = 0
			if entropy_total[index] < 0:
				# This overrides any pre-existing cached data
				entropy_valid[index] = _find_valid_samples_at(chunk, x, y, samples)
				# The entropy is stored separately for faster processing
				entropy_total[index] = entropy_valid[index].size()


func _zero_entropy_at(chunk: Chunk, index: int) -> void:
	var clear_at := func (dx: int, dy: int) -> void:
		var idx: int = index + (dy * Chunk.CHUNK_WIDTH) + dx
		if idx >= 0 and idx < Chunk.CHUNK_WIDTH * Chunk.CHUNK_HEIGHT:
			if entropy_total[idx] > 0 and chunk.data[idx] <= 0:
				entropy_total[idx] = -1 # Mark as unknown so it gets recalculated
	# Unset the surrounding tiles
	clear_at.call(-1, -1)
	clear_at.call( 0, -1)
	clear_at.call( 1, -1)
	clear_at.call(-1,  0)
	entropy_total[index] = 0 # Shortcut for the centre tile itself
	clear_at.call( 1,  0)
	clear_at.call(-1, +1)
	clear_at.call( 0, +1)
	clear_at.call( 1, +1)


## Creates a new chunk, derived from the given WFC samples
func generate_chunk_at(_x: int, _y: int, chunk_array: Array[Chunk], samples: Array[PackedInt32Array]) -> Chunk:
	var chunk: Chunk = Chunk.new(_x, _y)
	_reset_entropy()
	#chunk.data[0] = 1 #DEBUG: seed
	_update_entropy(chunk, samples)
	while true: # TODO: would a floodfill approach work better?
		var index: int = _find_lowest_entropy_tile_index()
		if index < 0:
			break # None found, finished
		print(index, ":", entropy_total[index]) # DEBUG
		var s: PackedInt32Array = entropy_valid[index].pick_random()
		chunk.data[index] = s[4] # The sample's centre value becomes the tile
		_zero_entropy_at(chunk, index)
		_update_entropy(chunk, samples)
	chunk_array.append(chunk)
	return chunk


## Returns the index of a tile with the lowest non-zero entropy, or -1 if none remains
func _find_lowest_entropy_tile_index() -> int:
	# Find the lowest-entropy tile
	var lowest: int = -1
	var lowest_list: Array[int] = [] # Track ties to smooth out order bias
	for i in range(Chunk.CHUNK_WIDTH * Chunk.CHUNK_HEIGHT):
		if lowest < 0:
			if entropy_total[i] <= 0:
				continue # No options here
			lowest = i
			lowest_list = [i]
			continue
		# Actually update the lowest found
		if entropy_total[i] > 0:
			if entropy_total[i] < entropy_total[lowest]:
				lowest = i
				lowest_list = [i]
			elif entropy_total[i] == entropy_total[lowest]:
				lowest_list.append(i)
	# Finished
	return lowest_list.pick_random() if lowest_list.size() > 0 else lowest


func _find_valid_samples_at(chunk: Chunk, tile_x: int, tile_y: int, _samples: Array[PackedInt32Array]) -> Array[PackedInt32Array]:
	var valid: Array[PackedInt32Array] = []
	# Use a lambda for easy reading below
	var compare := func (tile_value: int, sample: int) -> bool:
		return tile_value <= 0 or tile_value == sample
	# Filter the samples against the 3x3 neighbourhood; sample[4] is the centre tile itself and is skipped
	for sample in _samples:
		if !compare.call(chunk.get_tile(tile_x - 1, tile_y - 1), sample[0]): continue
		if !compare.call(chunk.get_tile(tile_x    , tile_y - 1), sample[1]): continue
		if !compare.call(chunk.get_tile(tile_x + 1, tile_y - 1), sample[2]): continue
		if !compare.call(chunk.get_tile(tile_x - 1, tile_y    ), sample[3]): continue
		if !compare.call(chunk.get_tile(tile_x + 1, tile_y    ), sample[5]): continue
		if !compare.call(chunk.get_tile(tile_x - 1, tile_y + 1), sample[6]): continue
		if !compare.call(chunk.get_tile(tile_x    , tile_y + 1), sample[7]): continue
		if !compare.call(chunk.get_tile(tile_x + 1, tile_y + 1), sample[8]): continue
		valid.append(sample)
	return valid
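
# --- Usage sketch (illustrative, not part of the original generator) ---
# Each sample is assumed to be a 9-entry PackedInt32Array: a 3x3 neighbourhood in
# row-major order, with the centre tile at index 4 (the value written into the chunk).
# The helper below is hypothetical; it slices such samples out of an existing tile
# grid and feeds them to generate_chunk_at. `source` stands for any object exposing
# get_tile(x, y) -> int, like the Chunk class used above.
func _example_generate_from(source, source_width: int, source_height: int) -> Chunk:
	var samples: Array[PackedInt32Array] = []
	# Collect every interior 3x3 window of the source grid as one sample
	for y in range(1, source_height - 1):
		for x in range(1, source_width - 1):
			var sample := PackedInt32Array()
			for dy in range(-1, 2):
				for dx in range(-1, 2):
					sample.append(source.get_tile(x + dx, y + dy))
			samples.append(sample)
	var chunks: Array[Chunk] = []
	return generate_chunk_at(0, 0, chunks, samples)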