← Volver al listado de tecnologías

Gestión de Recursos y Assets Pipeline

Por: Artiko
Etiquetas: defold, assets, recursos, pipeline, optimización

Gestión de Recursos y Assets Pipeline

La gestión eficiente de recursos es crucial para crear juegos escalables y optimizados. En esta lección aprenderás a crear sistemas robustos de asset management, pipelines automatizados y técnicas avanzadas de optimización de recursos.

📁 Sistema de Asset Management

Resource Manager Core

-- resource_manager.lua
-- Reference-counted resource manager with per-category memory budgets, an
-- LRU eviction cache and async/streaming loading (uses Defold runtime APIs
-- such as resource.* and http.request, plus LuaSocket's socket.gettime).
local ResourceManager = {}
ResourceManager.__index = ResourceManager

-- Logical asset categories. The type decides which memory budget the
-- resource counts against (see can_load_resource).
local RESOURCE_TYPES = {
    TEXTURE = "texture",
    SOUND = "sound",
    ANIMATION = "animation",
    FONT = "font",
    SPINE = "spine",
    MODEL = "model",
    SCRIPT = "script"
}

-- Eviction priorities: LOWER number = MORE important. Eviction only frees
-- resources whose priority number is strictly greater (less important) than
-- the incoming resource's (see free_memory_for_resource).
local RESOURCE_PRIORITIES = {
    CRITICAL = 1,    -- UI, player assets
    HIGH = 2,        -- Enemies, level geometry
    MEDIUM = 3,      -- Effects, decorations
    LOW = 4,         -- Background elements
    STREAMING = 5    -- Large assets loaded on demand
}

--- Create an empty ResourceManager instance.
-- All tracking tables start empty; memory budgets are fixed at 256MB total
-- (128MB textures / 64MB audio / 64MB everything else).
-- @treturn table new ResourceManager
function ResourceManager.new()
    local MB = 1024 * 1024
    local instance = {
        loaded_resources = {},     -- path -> loaded resource data
        loading_queue = {},
        resource_metadata = {},    -- path -> metadata from register_resource
        memory_budget = {
            total = 256 * MB,
            textures = 128 * MB,
            audio = 64 * MB,
            other = 64 * MB
        },
        current_usage = {
            textures = 0,
            audio = 0,
            other = 0
        },
        reference_counts = {},     -- path -> live reference count
        lru_cache = {},
        loading_in_progress = {}   -- path -> {callbacks, start_time}
    }
    return setmetatable(instance, ResourceManager)
end

--- Register metadata for a resource path so the manager can budget and
-- prioritize it later. Missing fields fall back to sensible defaults
-- (texture type, MEDIUM priority, zero size, non-streaming).
-- @tparam string path resource path, used as the registry key
-- @tparam[opt] table metadata type/priority/size/tags/dependencies/
--   compression/lod_levels/streaming
function ResourceManager:register_resource(path, metadata)
    local meta = metadata or {}
    local entry = {
        type = meta.type or RESOURCE_TYPES.TEXTURE,
        priority = meta.priority or RESOURCE_PRIORITIES.MEDIUM,
        size = meta.size or 0,
        tags = meta.tags or {},
        dependencies = meta.dependencies or {},
        compression = meta.compression,
        lod_levels = meta.lod_levels or {},
        streaming = meta.streaming or false
    }
    self.resource_metadata[path] = entry
end

--- Load a resource, reusing the cached copy when available.
-- Already-loaded resources get an extra reference and the callback fires
-- synchronously; concurrent requests for the same path share one in-flight
-- load and all callbacks are notified on completion.
-- @tparam string path resource path
-- @tparam[opt] function callback invoked as callback(resource, err)
-- @treturn table|nil the resource when already cached, nil while loading
function ResourceManager:load_resource(path, callback)
    -- Cache hit: bump the refcount and answer immediately.
    if self.loaded_resources[path] then
        self:add_reference(path)
        if callback then
            callback(self.loaded_resources[path], nil)
        end
        return self.loaded_resources[path]
    end

    -- A load for this path is already in flight: just queue the callback.
    if self.loading_in_progress[path] then
        if callback then
            table.insert(self.loading_in_progress[path].callbacks, callback)
        end
        return nil
    end

    -- Fix: unregistered paths used to pass nil metadata into start_loading,
    -- which then crashed indexing metadata.streaming. Fall back to an empty
    -- metadata table (defaults: unknown size, "other" budget, non-streaming).
    local metadata = self.resource_metadata[path] or {}

    -- Try to evict lower-priority resources when over budget. The load
    -- still proceeds even if not enough memory could be freed (best effort,
    -- matching the original behavior).
    if not self:can_load_resource(metadata) then
        self:free_memory_for_resource(metadata)
    end

    self.loading_in_progress[path] = {
        callbacks = callback and {callback} or {},
        start_time = socket.gettime() -- wall clock, used for load timing logs
    }

    self:start_loading(path, metadata)
    return nil
end

--- Dispatch a load to the streaming or regular path.
-- Fix: tolerate nil metadata (unregistered resources) instead of crashing
-- on metadata.streaming; unknown resources load through the regular path.
function ResourceManager:start_loading(path, metadata)
    if metadata and metadata.streaming then
        self:load_streaming_resource(path, metadata)
    else
        self:load_regular_resource(path, metadata)
    end
end

--- Kick off an async engine load for a non-streaming resource.
-- Fix: the completion callback's first parameter was named `self`,
-- shadowing the manager — in Defold that argument is the calling script
-- instance, so on_resource_loaded was being invoked on the wrong object.
-- The manager is now captured explicitly in a closure upvalue.
function ResourceManager:load_regular_resource(path, metadata)
    local manager = self
    resource.load_async(path, function(_, url, resource_data)
        manager:on_resource_loaded(path, url, resource_data)
    end)
end

--- Begin a chunked HTTP download for a large streaming resource.
-- Fixes: (1) the HTTP callback's first parameter shadowed the manager as
-- `self` (Defold passes the script instance there), so the handlers were
-- called on the wrong object; (2) "Range: bytes=0-N" is inclusive, so the
-- end offset must be chunk_size - 1; (3) a satisfied range request answers
-- 206 Partial Content, which was previously treated as a failure.
-- NOTE(review): process_streaming_data is not defined in this file —
-- confirm it exists elsewhere before relying on this path.
function ResourceManager:load_streaming_resource(path, metadata)
    local manager = self
    local chunk_size = 64 * 1024 -- 64KB chunks

    http.request(path, "GET", function(_, id, response)
        if response.status == 200 or response.status == 206 then
            manager:process_streaming_data(path, response.response)
        else
            manager:on_loading_error(path, "HTTP error: " .. response.status)
        end
    end, nil, nil, {["Range"] = "bytes=0-" .. (chunk_size - 1)})
end

--- Completion handler for a finished load.
-- Registers the resource, updates accounting (refcount, memory, LRU),
-- notifies every queued callback, and clears the in-flight record.
-- A nil payload is routed to the error path instead.
function ResourceManager:on_resource_loaded(path, url, resource_data)
    local pending = self.loading_in_progress[path]
    if pending == nil then return end

    if not resource_data then
        -- on_loading_error also clears loading_in_progress[path].
        self:on_loading_error(path, "Failed to load resource")
        self.loading_in_progress[path] = nil
        return
    end

    -- Register the freshly loaded resource and update bookkeeping.
    self.loaded_resources[path] = resource_data
    self:add_reference(path)
    self:update_memory_usage(path, true)
    self:update_lru_cache(path)

    -- Notify everyone who asked for this path while it was loading.
    for _, cb in ipairs(pending.callbacks) do
        cb(resource_data, nil)
    end

    print("Resource loaded:", path, "Time:", socket.gettime() - pending.start_time)
    self.loading_in_progress[path] = nil
end

--- Failure handler: notify every queued callback with the error message,
-- log it, and drop the in-flight record for this path.
function ResourceManager:on_loading_error(path, error_message)
    local pending = self.loading_in_progress[path]
    if pending == nil then return end

    -- Deliver the error to every waiting caller.
    for index = 1, #pending.callbacks do
        pending.callbacks[index](nil, error_message)
    end

    print("Resource loading failed:", path, "Error:", error_message)
    self.loading_in_progress[path] = nil
end

--- Drop one reference to a loaded resource; release it for real only when
-- the last reference goes away.
-- @tparam string path resource path
-- @treturn boolean true when the resource was actually released
function ResourceManager:unload_resource(path)
    if self.loaded_resources[path] == nil then
        return false
    end

    local last_reference_gone = self:remove_reference(path)
    if not last_reference_gone then
        return false
    end

    -- Last reference removed: hand the resource back to the engine and
    -- scrub all bookkeeping for this path.
    resource.release(path)
    self.loaded_resources[path] = nil
    self:update_memory_usage(path, false)
    self:remove_from_lru_cache(path)
    return true
end

--- Increment the reference count for a path (starts at 1 when untracked).
function ResourceManager:add_reference(path)
    local count = self.reference_counts[path]
    self.reference_counts[path] = count and (count + 1) or 1
end

--- Decrement the reference count for a path.
-- An untracked path is treated as already unreferenced.
-- @treturn boolean true when no references remain
function ResourceManager:remove_reference(path)
    local count = self.reference_counts[path]
    if count == nil then
        return true
    end

    count = count - 1
    self.reference_counts[path] = count
    return count <= 0
end

--- Check whether a resource described by `metadata` fits its category's
-- memory budget given current usage.
-- @tparam table metadata registered metadata (type/size)
-- @treturn boolean true when the resource fits within its budget
function ResourceManager:can_load_resource(metadata)
    local needed = metadata.size or 1024 -- assume 1KB when size is unknown

    local usage, budget
    if metadata.type == RESOURCE_TYPES.TEXTURE then
        usage, budget = self.current_usage.textures, self.memory_budget.textures
    elseif metadata.type == RESOURCE_TYPES.SOUND then
        usage, budget = self.current_usage.audio, self.memory_budget.audio
    else
        usage, budget = self.current_usage.other, self.memory_budget.other
    end

    return usage + needed <= budget
end

--- Try to evict least-recently-used, lower-priority, unreferenced resources
-- until enough memory is freed for the incoming resource.
-- Fix: a missing reference-count entry is now treated as zero — in Lua
-- `nil == 0` is false, so paths without a count entry could never be
-- evicted before.
-- NOTE(review): resources loaded via load_resource hold at least one
-- reference, so eviction only frees paths whose references were released
-- externally — confirm that ownership model is intended. get_lru_sorted_resources
-- is not defined in this file; presumably provided elsewhere.
-- @tparam table metadata metadata of the resource that needs room
-- @treturn boolean true when at least metadata.size bytes were freed
function ResourceManager:free_memory_for_resource(metadata)
    local target_memory = metadata.size or 1024
    local freed_memory = 0

    -- Least-recently-used first, so hot resources survive eviction.
    local lru_sorted = self:get_lru_sorted_resources()

    for _, path in ipairs(lru_sorted) do
        if freed_memory >= target_memory then break end

        local candidate = self.resource_metadata[path]
        local refs = self.reference_counts[path] or 0
        -- Only evict strictly less important resources (higher priority
        -- number) that nobody currently references.
        if candidate and candidate.priority > metadata.priority and refs == 0 then
            if self:unload_resource(path) then
                freed_memory = freed_memory + candidate.size
            end
        end
    end

    return freed_memory >= target_memory
end

return ResourceManager

Asset Streaming System

-- asset_streaming.script
local ResourceManager = require "main.resource_manager"

-- Script entry point: builds the resource manager and streaming state.
-- NOTE(review): self:setup_streaming_zones() is broken as written —
-- setup_streaming_zones is declared as a *local function* further down this
-- file, so at this point it is neither a method on the script instance nor
-- an upvalue; the call resolves to nil at runtime. Move the local helper
-- definitions above init (or attach them to self) to fix the call.
function init(self)
    self.resource_manager = ResourceManager.new()
    self.streaming_zones = {}
    self.player_position = vmath.vector3()
    self.streaming_distance = 500 -- load radius around the player
    self.unload_distance = 800 -- unload radius (hysteresis vs. load radius)
    self.update_interval = 1.0 -- re-evaluate streaming once per second
    self.last_update = 0

    -- Configure the map's streaming zones
    self:setup_streaming_zones()
end

-- Extension -> asset-type lookup used while registering zone assets.
-- Fix: the original called self:get_asset_type() and
-- self:estimate_asset_size(), but those are local functions declared
-- further down the file, not methods — both calls resolved to nil at
-- runtime. The lookups are inlined here so registration is self-contained.
local ZONE_ASSET_TYPES = {
    png = "texture", jpg = "texture",
    ogg = "sound", wav = "sound",
    dae = "model", spine = "animation",
    particlefx = "effect"
}

-- Rough per-extension size estimates (bytes) used for memory budgeting.
local ZONE_ASSET_SIZES = {
    png = 512 * 1024,      -- 512KB
    jpg = 256 * 1024,      -- 256KB
    ogg = 1024 * 1024,     -- 1MB
    wav = 2048 * 1024,     -- 2MB
    dae = 100 * 1024,      -- 100KB
    spine = 50 * 1024,     -- 50KB
    particlefx = 10 * 1024 -- 10KB
}

--- Define the map's streaming zones and register every zone asset with the
-- resource manager so memory budgets and priorities apply.
local function setup_streaming_zones(self)
    self.streaming_zones = {
        {
            name = "forest_area",
            bounds = {min = vmath.vector3(0, 0, 0), max = vmath.vector3(1000, 1000, 0)},
            assets = {
                "textures/forest_tileset.png",
                "sounds/forest_ambient.ogg",
                "models/tree_large.dae",
                "models/tree_small.dae"
            },
            priority = 2
        },
        {
            name = "city_area",
            bounds = {min = vmath.vector3(1000, 0, 0), max = vmath.vector3(2000, 1000, 0)},
            assets = {
                "textures/city_tileset.png",
                "sounds/city_ambient.ogg",
                "models/building_tall.dae",
                "models/car.dae"
            },
            priority = 2
        },
        {
            name = "boss_arena",
            bounds = {min = vmath.vector3(500, 1000, 0), max = vmath.vector3(1500, 1500, 0)},
            assets = {
                "textures/boss_arena.png",
                "sounds/boss_music.ogg",
                "animations/boss_idle.spine",
                "effects/boss_portal.particlefx"
            },
            priority = 1 -- highest priority
        }
    }

    -- Register each asset with type/size/priority metadata.
    for _, zone in ipairs(self.streaming_zones) do
        for _, asset_path in ipairs(zone.assets) do
            local extension = string.match(asset_path, "%.([^%.]+)$")
            self.resource_manager:register_resource(asset_path, {
                type = ZONE_ASSET_TYPES[extension] or "other",
                priority = zone.priority,
                size = ZONE_ASSET_SIZES[extension] or 10 * 1024,
                streaming = true,
                zone = zone.name
            })
        end
    end
end

--- Classify an asset by file extension (`self` is unused but kept for the
-- call convention used throughout this script).
-- @tparam string path asset path, e.g. "textures/foo.png"
-- @treturn string one of "texture"/"sound"/"model"/"animation"/"effect",
--   or "other" for unknown or missing extensions
local function get_asset_type(self, path)
    local extension = path:match("%.([^%.]+)$")
    local types_by_extension = {
        png = "texture",
        jpg = "texture",
        ogg = "sound",
        wav = "sound",
        dae = "model",
        spine = "animation",
        particlefx = "effect"
    }
    return types_by_extension[extension] or "other"
end

--- Rough size estimate (bytes) for an asset, keyed by file extension.
-- Used only for memory budgeting; unknown extensions default to 10KB.
-- @tparam string path asset path
-- @treturn number estimated size in bytes
local function estimate_asset_size(self, path)
    local KB = 1024
    local estimates_by_extension = {
        png = 512 * KB,
        jpg = 256 * KB,
        ogg = 1024 * KB,
        wav = 2048 * KB,
        dae = 100 * KB,
        spine = 50 * KB,
        particlefx = 10 * KB
    }
    local extension = path:match("%.([^%.]+)$")
    return estimates_by_extension[extension] or 10 * KB
end

--- Distance from a point to the nearest point of a zone's axis-aligned
-- bounds in the XY plane (zero when the position lies inside the bounds).
-- @tparam table zone zone with bounds.min / bounds.max vector3s
-- @tparam vector3 position world position to measure from
-- @treturn number distance in world units
local function get_distance_to_zone(self, zone, position)
    local lo, hi = zone.bounds.min, zone.bounds.max
    -- Clamp the position onto the box to find its closest surface point.
    local clamped_x = math.min(math.max(position.x, lo.x), hi.x)
    local clamped_y = math.min(math.max(position.y, lo.y), hi.y)
    local nearest = vmath.vector3(clamped_x, clamped_y, 0)
    return vmath.length(position - nearest)
end

--- Load assets for zones near the player and unload assets for far zones.
-- Zones between streaming_distance and unload_distance are left untouched,
-- giving hysteresis that avoids load/unload thrashing at the boundary.
local function update_streaming(self)
    self.player_position = go.get_position("player") or self.player_position
    local manager = self.resource_manager

    for _, zone in ipairs(self.streaming_zones) do
        local distance = get_distance_to_zone(self, zone, self.player_position)

        if distance <= self.streaming_distance then
            -- Inside the load radius: stream in anything not yet resident.
            for _, asset_path in ipairs(zone.assets) do
                local resident = manager.loaded_resources[asset_path]
                local in_flight = manager.loading_in_progress[asset_path]
                if not resident and not in_flight then
                    manager:load_resource(asset_path, function(resource, error)
                        if resource then
                            print("Streamed in:", asset_path)
                        else
                            print("Failed to stream:", asset_path, error)
                        end
                    end)
                end
            end
        elseif distance >= self.unload_distance then
            -- Beyond the unload radius: release the zone's resident assets.
            for _, asset_path in ipairs(zone.assets) do
                if manager.loaded_resources[asset_path] then
                    if manager:unload_resource(asset_path) then
                        print("Streamed out:", asset_path)
                    end
                end
            end
        end
    end
end

--- Per-frame hook: throttles streaming re-evaluation to once per
-- update_interval seconds.
function update(self, dt)
    local elapsed = self.last_update + dt

    if elapsed < self.update_interval then
        self.last_update = elapsed
        return
    end

    update_streaming(self)
    self.last_update = 0
end

-- Preload crítico
local function preload_critical_assets(self)
    local critical_assets = {
        "textures/ui_atlas.png",
        "sounds/ui_click.ogg",
        "fonts/main_font.font",
        "textures/player_atlas.png"
    }

    for _, asset_path in ipairs(critical_assets) do
        self.resource_manager:register_resource(asset_path, {
            priority = 1, -- CRITICAL
            streaming = false
        })

        self.resource_manager:load_resource(asset_path, function(resource, error)
            if resource then
                print("Critical asset loaded:", asset_path)
            else
                print("CRITICAL ASSET FAILED:", asset_path, error)
            end
        end)
    end
end

-- Message router for the streaming script.
-- NOTE(review): the "clear_cache" and "memory_warning" handlers call
-- self.resource_manager:clear_cache() and :emergency_cleanup(), but neither
-- method is defined on the ResourceManager shown in this file — confirm
-- they exist elsewhere, otherwise these handlers error at runtime.
function on_message(self, message_id, message, sender)
    if message_id == hash("preload_critical") then
        preload_critical_assets(self)

    elseif message_id == hash("clear_cache") then
        self.resource_manager:clear_cache()

    elseif message_id == hash("memory_warning") then
        self.resource_manager:emergency_cleanup()
    end
end

🔄 Asset Pipeline Automation

Build Pipeline

-- build_pipeline.lua
-- Platform-aware asset build pipeline with per-type processors and a
-- timestamp-based build cache.
local BuildPipeline = {}
BuildPipeline.__index = BuildPipeline -- fix: needed so instances find methods

-- Maps asset type -> name of the BuildPipeline method that processes it.
local ASSET_PROCESSORS = {
    texture = "process_texture",
    audio = "process_audio",
    model = "process_model",
    animation = "process_animation"
}

--- Create a pipeline instance.
-- Fix: the original returned a plain table with no metatable, so method
-- calls like pipeline:process_asset(...) failed — instances never saw the
-- functions defined on BuildPipeline. setmetatable wires up method lookup
-- through BuildPipeline.__index.
-- @tparam[opt] table config arbitrary pipeline configuration
-- @treturn table new BuildPipeline
function BuildPipeline.new(config)
    local self = setmetatable({}, BuildPipeline)
    self.config = config or {}
    self.processed_assets = {}
    self.build_cache = {}
    self.optimization_settings = {
        texture_compression = true,
        audio_compression = true,
        model_optimization = true,
        atlas_generation = true
    }
    return self
end

--- Process one asset for a target platform, reusing the build cache when
-- the source file is unchanged. Returns the input path untouched when no
-- processor exists for the asset's type.
-- @tparam string asset_path source asset path
-- @tparam string target_platform e.g. "android"/"ios"/"html5"
-- @treturn string path of the processed output (or the input path)
function BuildPipeline:process_asset(asset_path, target_platform)
    local info = self:get_asset_info(asset_path)
    local processor_name = ASSET_PROCESSORS[info.type]

    if processor_name == nil then
        print("Warning: No processor for asset type:", info.type)
        return asset_path
    end

    -- Cache hit: reuse the previous output if the source hasn't changed.
    local cache_key = self:get_cache_key(asset_path, target_platform)
    local cached = self.build_cache[cache_key]
    if cached and self:is_cache_valid(cache_key, asset_path) then
        return cached.output_path
    end

    local processor = self[processor_name]
    if processor == nil then
        return asset_path
    end

    -- Run the processor and remember its output for future builds.
    local output_path = processor(self, asset_path, info, target_platform)
    self.build_cache[cache_key] = {
        output_path = output_path,
        timestamp = self:get_file_timestamp(asset_path),
        platform = target_platform
    }
    return output_path
end

--- Convert/compress a texture for the target platform.
-- Unknown platforms fall back to the android profile. When texture
-- compression is disabled the source file is copied verbatim.
-- @treturn string output path of the processed texture
function BuildPipeline:process_texture(asset_path, asset_info, target_platform)
    local output_path = self:get_output_path(asset_path, target_platform)

    -- Per-platform compression profiles.
    local platform_profiles = {
        android = {format = "etc2", max_size = 2048, quality = "normal"},
        ios = {format = "astc", max_size = 2048, quality = "high"},
        html5 = {format = "dxt", max_size = 1024, quality = "normal"}
    }
    local profile = platform_profiles[target_platform] or platform_profiles.android

    if self.optimization_settings.texture_compression then
        self:compress_texture(asset_path, output_path, profile)
    else
        self:copy_file(asset_path, output_path)
    end

    -- Mipmaps only when the asset explicitly requests them.
    if asset_info.generate_mipmaps then
        self:generate_mipmaps(output_path)
    end

    return output_path
end

--- Transcode/compress an audio asset for the target platform.
-- Unknown platforms fall back to the android profile. When audio
-- compression is disabled the source file is copied verbatim.
-- @treturn string output path of the processed audio file
function BuildPipeline:process_audio(asset_path, asset_info, target_platform)
    local output_path = self:get_output_path(asset_path, target_platform)

    -- Per-platform encoding profiles.
    local platform_profiles = {
        android = {format = "ogg", bitrate = 128, sample_rate = 44100},
        ios = {format = "m4a", bitrate = 160, sample_rate = 44100},
        html5 = {format = "ogg", bitrate = 96, sample_rate = 44100}
    }
    local profile = platform_profiles[target_platform] or platform_profiles.android

    if self.optimization_settings.audio_compression then
        self:compress_audio(asset_path, output_path, profile)
    else
        self:copy_file(asset_path, output_path)
    end

    return output_path
end

--- Build one atlas per texture group and return their metadata.
-- @tparam table textures list of texture paths
-- @tparam string target_platform used in the generated atlas filenames
-- @treturn table map of group name -> {texture_path, uv_data, size}
function BuildPipeline:generate_atlases(textures, target_platform)
    local atlases = {}

    for group_name, group_textures in pairs(self:group_textures_for_atlases(textures)) do
        local atlas_path = string.format("atlases/%s_%s.png", group_name, target_platform)
        local packed = self:pack_textures_to_atlas(group_textures, atlas_path)

        atlases[group_name] = {
            texture_path = atlas_path,
            uv_data = packed.uv_mapping,
            size = packed.size
        }

        print("Generated atlas:", atlas_path, "Size:", packed.size.x .. "x" .. packed.size.y)
    end

    return atlases
end

--- Bucket texture paths by their atlas_group metadata ("default" when the
-- asset does not specify one).
-- @treturn table map of group name -> list of texture paths
function BuildPipeline:group_textures_for_atlases(textures)
    local groups = {}

    for _, texture_path in ipairs(textures) do
        local group_name = self:get_asset_info(texture_path).atlas_group or "default"
        local bucket = groups[group_name]
        if bucket == nil then
            bucket = {}
            groups[group_name] = bucket
        end
        bucket[#bucket + 1] = texture_path
    end

    return groups
end

--- Shelf-pack textures into a fixed 1024x1024 atlas and compute normalized
-- UV rectangles for each. Textures that overflow the atlas vertically are
-- skipped with a warning. Writing the actual atlas bitmap is delegated to
-- create_atlas_texture (external tooling).
-- @treturn table {uv_mapping = path -> {x,y,w,h}, size = vector3}
function BuildPipeline:pack_textures_to_atlas(textures, output_path)
    local atlas_size = vmath.vector3(1024, 1024, 0)
    local cursor_x, cursor_y = 0, 0
    local shelf_height = 0
    local uv_mapping = {}

    for _, texture_path in ipairs(textures) do
        local dims = self:get_texture_dimensions(texture_path)
        local w, h = dims.width, dims.height

        -- Wrap to a new shelf when the texture overflows the current row.
        if cursor_x + w > atlas_size.x then
            cursor_x = 0
            cursor_y = cursor_y + shelf_height
            shelf_height = 0
        end

        if cursor_y + h > atlas_size.y then
            -- Out of vertical space: skip this texture entirely.
            print("Warning: Texture doesn't fit in atlas:", texture_path)
        else
            -- Record the normalized UV rectangle and advance the cursor.
            uv_mapping[texture_path] = {
                x = cursor_x / atlas_size.x,
                y = cursor_y / atlas_size.y,
                w = w / atlas_size.x,
                h = h / atlas_size.y
            }
            cursor_x = cursor_x + w
            shelf_height = math.max(shelf_height, h)
        end
    end

    -- Producing the atlas bitmap itself requires external tools.
    self:create_atlas_texture(textures, uv_mapping, output_path, atlas_size)

    return {
        uv_mapping = uv_mapping,
        size = atlas_size
    }
end

return BuildPipeline

Automated Asset Validation

-- asset_validator.script
-- Batch asset validation: checks textures, audio and Spine files against
-- the rule sets below and accumulates errors/warnings/metrics per asset.
local AssetValidator = {}

-- Per-asset-type validation thresholds. "errors" block a build; anything
-- softer is reported as a warning.
local VALIDATION_RULES = {
    texture = {
        max_size = {width = 2048, height = 2048},
        required_formats = {"png", "jpg"},
        power_of_two = false, -- Defold does not require power-of-two textures
        max_file_size = 4 * 1024 * 1024, -- 4MB
        compression_check = true
    },
    audio = {
        max_duration = 120, -- 2 minutes
        required_formats = {"ogg", "wav"},
        max_file_size = 10 * 1024 * 1024, -- 10MB
        sample_rate_check = true,
        valid_sample_rates = {22050, 44100, 48000}
    },
    spine = {
        max_bones = 100,
        max_animations = 50,
        atlas_check = true,
        version_check = true
    }
}

-- Script entry point: reset per-run accumulators.
function init(self)
    self.validation_errors = {}   -- path -> list of error strings
    self.validation_warnings = {} -- path -> list of warning strings
    self.asset_metrics = {}       -- path -> {file_size, type, validated_at}
end

--- Validate a texture asset against VALIDATION_RULES.texture.
-- Fix: paths with no file extension made string.match return nil and the
-- chained :lower() crashed; extension extraction is now guarded.
-- @tparam string asset_path path to the texture file
-- @treturn table errors blocking problems
-- @treturn table warnings non-blocking advisories
local function validate_texture(self, asset_path)
    local rules = VALIDATION_RULES.texture
    local errors = {}
    local warnings = {}

    -- Existence first: nothing else is meaningful without the file.
    if not self:file_exists(asset_path) then
        table.insert(errors, "File does not exist: " .. asset_path)
        return errors, warnings
    end

    -- Format check (guarded against extension-less paths).
    local extension = string.match(asset_path, "%.([^%.]+)$")
    extension = extension and extension:lower() or ""
    if not self:table_contains(rules.required_formats, extension) then
        table.insert(errors, "Invalid format: " .. extension .. " (expected: " .. table.concat(rules.required_formats, ", ") .. ")")
    end

    -- Dimension checks.
    local dimensions = self:get_image_dimensions(asset_path)
    if dimensions then
        if dimensions.width > rules.max_size.width or dimensions.height > rules.max_size.height then
            table.insert(errors, string.format("Texture too large: %dx%d (max: %dx%d)",
                                               dimensions.width, dimensions.height,
                                               rules.max_size.width, rules.max_size.height))
        end

        -- Power-of-two advisory (off by default; Defold does not need it).
        if rules.power_of_two then
            if not self:is_power_of_two(dimensions.width) or not self:is_power_of_two(dimensions.height) then
                table.insert(warnings, "Texture is not power of two: " .. dimensions.width .. "x" .. dimensions.height)
            end
        end

        -- Extreme aspect ratios waste atlas space and hint at misuse.
        local aspect_ratio = dimensions.width / dimensions.height
        if aspect_ratio > 8 or aspect_ratio < 0.125 then
            table.insert(warnings, "Extreme aspect ratio: " .. string.format("%.2f", aspect_ratio))
        end
    end

    -- File-size advisory.
    local file_size = self:get_file_size(asset_path)
    if file_size > rules.max_file_size then
        table.insert(warnings, string.format("Large file size: %.2fMB (consider compression)",
                                            file_size / (1024 * 1024)))
    end

    -- Optional PNG re-compression advisory.
    if rules.compression_check and extension == "png" then
        local compression_info = self:analyze_png_compression(asset_path)
        if compression_info.can_optimize then
            table.insert(warnings, "PNG can be optimized further (estimated savings: " ..
                                  compression_info.potential_savings .. "%)")
        end
    end

    return errors, warnings
end

--- Validate an audio asset against VALIDATION_RULES.audio.
-- Fix: extension-less paths crashed on string.match(...):lower();
-- extraction is now guarded and a missing extension reports as invalid.
-- @treturn table errors blocking problems
-- @treturn table warnings non-blocking advisories
local function validate_audio(self, asset_path)
    local rules = VALIDATION_RULES.audio
    local errors = {}
    local warnings = {}

    if not self:file_exists(asset_path) then
        table.insert(errors, "File does not exist: " .. asset_path)
        return errors, warnings
    end

    local extension = string.match(asset_path, "%.([^%.]+)$")
    extension = extension and extension:lower() or ""
    if not self:table_contains(rules.required_formats, extension) then
        table.insert(errors, "Invalid audio format: " .. extension)
    end

    local audio_info = self:get_audio_info(asset_path)
    if audio_info then
        -- Long clips should be streamed rather than fully decoded.
        if audio_info.duration > rules.max_duration then
            table.insert(warnings, string.format("Long audio file: %.1fs (consider streaming for files > %.1fs)",
                                                 audio_info.duration, rules.max_duration))
        end

        -- Non-standard sample rates force resampling at runtime.
        if rules.sample_rate_check and not self:table_contains(rules.valid_sample_rates, audio_info.sample_rate) then
            table.insert(warnings, "Non-standard sample rate: " .. audio_info.sample_rate .. "Hz")
        end

        -- Very high bitrates are wasted on mobile hardware.
        if audio_info.bitrate and audio_info.bitrate > 320 then
            table.insert(warnings, "High bitrate: " .. audio_info.bitrate .. "kbps (consider lower bitrate for mobile)")
        end
    end

    local file_size = self:get_file_size(asset_path)
    if file_size > rules.max_file_size then
        table.insert(errors, string.format("Audio file too large: %.2fMB", file_size / (1024 * 1024)))
    end

    return errors, warnings
end

--- Validate a Spine skeleton JSON against VALIDATION_RULES.spine.
-- Checks existence, JSON parse, bone/animation counts, the companion
-- .atlas file, and the exporting Spine editor version.
-- @treturn table errors blocking problems
-- @treturn table warnings non-blocking advisories
local function validate_spine_animation(self, asset_path)
    local rules = VALIDATION_RULES.spine
    local errors, warnings = {}, {}

    if not self:file_exists(asset_path) then
        errors[#errors + 1] = "Spine file does not exist: " .. asset_path
        return errors, warnings
    end

    local spine_data = self:parse_spine_json(asset_path)
    if spine_data == nil then
        errors[#errors + 1] = "Invalid Spine JSON format"
        return errors, warnings
    end

    -- Bone budget: skinning cost grows with bone count on mobile.
    local bones = spine_data.bones
    if bones and #bones > rules.max_bones then
        warnings[#warnings + 1] = string.format("High bone count: %d (performance impact on mobile)",
                                            #bones)
    end

    -- Animations live in a hash table, so count them manually.
    if spine_data.animations then
        local animation_total = 0
        for _ in pairs(spine_data.animations) do
            animation_total = animation_total + 1
        end

        if animation_total > rules.max_animations then
            warnings[#warnings + 1] = string.format("Many animations: %d (consider splitting)",
                                                animation_total)
        end
    end

    -- The .atlas companion must sit next to the .json file.
    if rules.atlas_check then
        local atlas_path = string.gsub(asset_path, "%.json$", ".atlas")
        if not self:file_exists(atlas_path) then
            errors[#errors + 1] = "Missing atlas file: " .. atlas_path
        end
    end

    -- Warn about exports from old Spine editors.
    if rules.version_check and spine_data.skeleton then
        local version = spine_data.skeleton.spine
        if version and self:compare_versions(version, "3.8.0") < 0 then
            warnings[#warnings + 1] = "Old Spine version: " .. version .. " (consider updating)"
        end
    end

    return errors, warnings
end

--- Run the type-appropriate validator over every asset in the list,
-- accumulating per-asset errors/warnings on self and recording metrics.
-- Unrecognized asset types pass through with no checks.
-- @treturn table batch summary (counts of assets, errors, warnings)
local function validate_all_assets(self, asset_list)
    local error_total, warning_total = 0, 0

    for _, asset_path in ipairs(asset_list) do
        local asset_type = self:get_asset_type(asset_path)

        -- Pick the validator for this asset type, if any.
        local validator
        if asset_type == "texture" then
            validator = validate_texture
        elseif asset_type == "audio" then
            validator = validate_audio
        elseif asset_type == "spine" then
            validator = validate_spine_animation
        end

        local errors, warnings = {}, {}
        if validator then
            errors, warnings = validator(self, asset_path)
        end

        if #errors > 0 then
            self.validation_errors[asset_path] = errors
            error_total = error_total + #errors
        end

        if #warnings > 0 then
            self.validation_warnings[asset_path] = warnings
            warning_total = warning_total + #warnings
        end

        -- Per-asset metrics for the report.
        self.asset_metrics[asset_path] = {
            file_size = self:get_file_size(asset_path),
            type = asset_type,
            validated_at = os.time()
        }
    end

    return {
        total_assets = #asset_list,
        total_errors = error_total,
        total_warnings = warning_total,
        assets_with_errors = self:count_keys(self.validation_errors),
        assets_with_warnings = self:count_keys(self.validation_warnings)
    }
end

--- Assemble the validation report and persist it as JSON.
-- Note: the summary fields count affected assets (table keys), not
-- individual issues.
-- @treturn table the report that was written to disk
local function generate_validation_report(self)
    local report = {
        timestamp = os.date("%Y-%m-%d %H:%M:%S"),
        summary = {
            total_errors = self:count_keys(self.validation_errors),
            total_warnings = self:count_keys(self.validation_warnings)
        },
        errors = self.validation_errors,
        warnings = self.validation_warnings,
        metrics = self:calculate_asset_metrics()
    }

    -- Serialize and write the report next to the build output.
    local report_path = "build/asset_validation_report.json"
    self:write_file(report_path, json.encode(report))

    print("Validation report generated:", report_path)
    return report
end

return AssetValidator

🔍 Asset Analytics y Monitoring

Usage Analytics

-- asset_analytics.script
-- Runtime asset analytics: load counts, load-time samples and error logs,
-- reported periodically to an analytics endpoint.
local AssetAnalytics = {}

--- Initialize analytics state for the session.
-- Fix: last_analytics_send is now initialized here — update() does
-- arithmetic on it every frame and previously crashed on nil.
-- NOTE(review): self:setup_resource_hooks() cannot work as written —
-- setup_resource_hooks is a *local function* declared below this point,
-- not a method on the script instance, so the call resolves to nil at
-- runtime. Move the local definitions above init or attach them to self.
function init(self)
    self.usage_stats = {}       -- path -> load counters and timestamps
    self.load_times = {}        -- path -> rolling load-time samples
    self.memory_usage = {}
    self.error_tracking = {}    -- path -> list of error records
    self.session_start = socket.gettime()
    self.last_analytics_send = socket.gettime() -- throttle used by update()
    self.analytics_enabled = true

    -- Install tracking hooks around the resource API
    self:setup_resource_hooks()
end

-- Install analytics wrappers around the engine resource API.
-- NOTE(review): the wrappers call self:track_resource_load() and
-- self:track_resource_release(), but both are *local functions* declared
-- later in this file, not methods on the script instance — these calls
-- resolve to nil the first time a hooked load/release runs. Declare the
-- trackers above this function (or attach them to self) to fix it.
-- Also note: monkey-patching the shared resource.* globals affects every
-- other script in the process; consider scoping the hooks.
local function setup_resource_hooks(self)
    -- Hook resource loading
    local original_load = resource.load
    resource.load = function(path)
        local start_time = socket.gettime()
        local result = original_load(path)
        local load_time = socket.gettime() - start_time

        self:track_resource_load(path, load_time, result ~= nil)
        return result
    end

    -- Hook resource release
    local original_release = resource.release
    resource.release = function(path)
        self:track_resource_release(path)
        return original_release(path)
    end
end

--- Record one load attempt for an asset: usage counters, a rolling window
-- of the last 20 load-time samples, and error bookkeeping.
-- Fix: the failure path called self:track_loading_error(), but that is a
-- local function declared later in this file, not a method — the call
-- resolved to nil and crashed. The error record is now written inline with
-- the same shape that track_loading_error produces.
local function track_resource_load(self, path, load_time, success)
    if not self.analytics_enabled then return end

    -- Lazily create the per-path stats record.
    if not self.usage_stats[path] then
        self.usage_stats[path] = {
            load_count = 0,
            total_load_time = 0,
            first_loaded = socket.gettime(),
            last_loaded = 0,
            error_count = 0
        }
    end

    local stats = self.usage_stats[path]
    stats.load_count = stats.load_count + 1
    stats.total_load_time = stats.total_load_time + load_time
    stats.last_loaded = socket.gettime()

    if not success then
        stats.error_count = stats.error_count + 1
        local error_log = self.error_tracking[path]
        if not error_log then
            error_log = {}
            self.error_tracking[path] = error_log
        end
        table.insert(error_log, {
            timestamp = socket.gettime(),
            error = "Load failed",
            session_time = socket.gettime() - self.session_start
        })
    end

    -- Rolling window: keep only the last 20 load-time samples per path.
    if not self.load_times[path] then
        self.load_times[path] = {}
    end
    table.insert(self.load_times[path], load_time)
    if #self.load_times[path] > 20 then
        table.remove(self.load_times[path], 1)
    end

    print(string.format("Asset loaded: %s (%.2fms, attempt #%d)",
                       path, load_time * 1000, stats.load_count))
end

--- Record when a tracked asset was released and how long it was resident.
-- Untracked paths (never loaded through the hooks) are ignored.
local function track_resource_release(self, path)
    if not self.analytics_enabled then return end

    local stats = self.usage_stats[path]
    if stats == nil then return end

    stats.last_released = socket.gettime()
    stats.session_duration = stats.last_released - stats.first_loaded
end

--- Append a timestamped error record for an asset path.
local function track_loading_error(self, path, error_message)
    local bucket = self.error_tracking[path]
    if bucket == nil then
        bucket = {}
        self.error_tracking[path] = bucket
    end

    bucket[#bucket + 1] = {
        timestamp = socket.gettime(),
        error = error_message,
        session_time = socket.gettime() - self.session_start
    }
end

--- Derive usage insights from the collected stats: hottest assets, slowest
-- loaders, error-prone assets, and assets idle for more than five minutes.
-- @treturn table analysis buckets (most_loaded, slowest_loading,
--   error_prone, memory_hogs, unused_assets)
local function analyze_usage_patterns(self)
    local analysis = {
        most_loaded = {},
        slowest_loading = {},
        error_prone = {},
        memory_hogs = {},
        unused_assets = {}
    }

    -- Top 10 by load count.
    local by_count = {}
    for path, stats in pairs(self.usage_stats) do
        by_count[#by_count + 1] = {path = path, count = stats.load_count}
    end
    table.sort(by_count, function(a, b) return a.count > b.count end)
    analysis.most_loaded = {unpack(by_count, 1, 10)}

    -- Top 10 by average load time.
    local by_avg_time = {}
    for path, samples in pairs(self.load_times) do
        local sample_count = #samples
        if sample_count > 0 then
            local total = 0
            for i = 1, sample_count do
                total = total + samples[i]
            end
            by_avg_time[#by_avg_time + 1] = {path = path, avg_time = total / sample_count}
        end
    end
    table.sort(by_avg_time, function(a, b) return a.avg_time > b.avg_time end)
    analysis.slowest_loading = {unpack(by_avg_time, 1, 10)}

    -- Every asset with at least one recorded error.
    for path, errors in pairs(self.error_tracking) do
        if #errors > 0 then
            analysis.error_prone[#analysis.error_prone + 1] = {
                path = path,
                error_count = #errors,
                last_error = errors[#errors]
            }
        end
    end

    -- Assets untouched for over 5 minutes.
    local now = socket.gettime()
    for path, stats in pairs(self.usage_stats) do
        local idle = now - stats.last_loaded
        if idle > 300 then
            analysis.unused_assets[#analysis.unused_assets + 1] = {
                path = path,
                time_unused = idle
            }
        end
    end

    return analysis
end

--- Assemble the session performance report.
-- NOTE(review): self:generate_recommendations(analysis) cannot work as
-- written — generate_recommendations is a local function declared *below*
-- this one, so it is neither a method nor an upvalue here and the call
-- resolves to nil at runtime. The helpers count_keys / sum_load_counts /
-- count_total_errors / calculate_average_load_time are not defined in this
-- file either — presumably provided elsewhere; verify before shipping.
local function generate_performance_report(self)
    local analysis = analyze_usage_patterns(self)
    local session_duration = socket.gettime() - self.session_start

    local report = {
        session_info = {
            duration = session_duration,
            timestamp = os.date("%Y-%m-%d %H:%M:%S"),
            platform = sys.get_sys_info().system_name
        },
        summary = {
            total_assets_loaded = self:count_keys(self.usage_stats),
            total_load_attempts = self:sum_load_counts(),
            total_errors = self:count_total_errors(),
            average_load_time = self:calculate_average_load_time()
        },
        analysis = analysis,
        recommendations = self:generate_recommendations(analysis)
    }

    return report
end

--- Turn a usage analysis into actionable recommendation records.
-- Slow loaders (>100ms average) become "performance" items, assets with
-- recorded errors become critical "reliability" items, and more than five
-- idle assets produce a single "optimization" item.
-- @tparam table analysis output of analyze_usage_patterns
-- @treturn table list of recommendation records
local function generate_recommendations(self, analysis)
    local recommendations = {}
    local function add(rec)
        recommendations[#recommendations + 1] = rec
    end

    -- Flag assets that average above 100ms to load.
    for _, slow in ipairs(analysis.slowest_loading) do
        if slow.avg_time > 0.1 then
            add({
                type = "performance",
                priority = "high",
                asset = slow.path,
                issue = "Slow loading time",
                suggestion = "Consider compression or splitting asset",
                impact = string.format("Average load time: %.2fms", slow.avg_time * 1000)
            })
        end
    end

    -- Every error-prone asset is a critical reliability item.
    for _, failing in ipairs(analysis.error_prone) do
        add({
            type = "reliability",
            priority = "critical",
            asset = failing.path,
            issue = "Loading errors detected",
            suggestion = "Check asset integrity and format",
            impact = string.format("%d errors recorded", failing.error_count)
        })
    end

    -- One aggregate item when many assets sit unused.
    if #analysis.unused_assets > 5 then
        add({
            type = "optimization",
            priority = "medium",
            issue = "Many unused assets",
            suggestion = "Consider lazy loading or removing unused assets",
            impact = string.format("%d assets unused for >5 minutes", #analysis.unused_assets)
        })
    end

    return recommendations
end

--- Per-frame hook: pushes analytics upstream at most once per minute.
-- Fix: guard against last_analytics_send being nil — init() in the original
-- never set it, so the subtraction crashed on the first frame.
-- NOTE(review): self:send_analytics_data() is a local function declared
-- below, not a method; that call still resolves to nil at runtime and needs
-- the file's declaration order fixed.
function update(self, dt)
    self.last_analytics_send = self.last_analytics_send or socket.gettime()

    if socket.gettime() - self.last_analytics_send > 60 then -- once a minute
        self:send_analytics_data()
        self.last_analytics_send = socket.gettime()
    end
end

--- Serialize the current performance report and POST it to the analytics
-- endpoint (fire-and-forget; only the HTTP status is logged).
local function send_analytics_data(self)
    local report = generate_performance_report(self)
    local payload = json.encode(report)

    -- Simulated analytics backend.
    http.request("https://analytics.example.com/assets", "POST", function(_, id, response)
        if response.status == 200 then
            print("Analytics data sent successfully")
        else
            print("Failed to send analytics:", response.status)
        end
    end, nil, nil, {["Content-Type"] = "application/json"}, payload)
end

return AssetAnalytics

📚 Recursos y Referencias

Asset Management Tools

Optimization Tools

🎯 Ejercicios Propuestos

  1. Hot Reloading System: Implementa un sistema que recargue assets automáticamente durante desarrollo.

  2. CDN Integration: Crea un sistema que descargue assets desde una CDN con fallbacks locales.

  3. Asset Bundling: Desarrolla un sistema de bundles que agrupe assets relacionados.

  4. Progressive Loading: Implementa carga progresiva con diferentes niveles de calidad.

  5. Asset Dependency Graph: Crea un sistema que rastree y resuelva dependencias entre assets.

La gestión eficiente de assets es crucial para crear juegos escalables y con buen rendimiento. Un pipeline bien diseñado te permitirá iterar rápidamente durante el desarrollo y optimizar automáticamente para diferentes plataformas de distribución.