Spine batch restore script

For Spine 4 batch unpacking, the size/scale value can be read directly from the .atlas file.
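
For reference, a 4.x atlas page header with the pack scale recorded might look roughly like this (names, values and spacing are illustrative only and depend on the export settings; the script below simply scans for a line beginning with scale:):

skeleton.png
size: 1024, 1024
filter: Linear, Linear
pma: true
scale: 0.5
head
bounds: 2, 2, 120, 160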

import os
import subprocess
import glob
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
from unpackpng import *  # local texture-unpacking helpers (provides batch())
fal = []  # .atlas paths that failed to process
SPINE_EXE = r'"C:\Program Files\Spine\Spine"'  # Spine editor executable, pre-quoted for the shell command
DEFAULT_SCALE = "1.0"  # fallback when the atlas has no scale line
MAX_WORKERS = 15  # number of Spine CLI processes run concurrently


def get_spine_version_from_skel(skel_path):
    try:
        with open(skel_path, 'rb') as f:
            f.read(9)  # heuristic: skip the bytes a 4.x skel puts before the version string
            version_bytes = f.read(6)
            # keep only printable ASCII, e.g. "4.1.24"
            version = ''.join([chr(b) for b in version_bytes if 0x20 <= b <= 0x7E])
            if len(version) >= 3 and version.count('.') >= 1:
                return version
    except Exception:
        pass
    return None


def get_spine_version_from_json(json_path):
    try:
        with open(json_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
            version = data.get("skeleton", {}).get("spine", "")
            if version and version.count('.') >= 1:
                return version
    except Exception:
        pass
    return None


def process_single_atlas(atlas_path):
    try:
        scale = DEFAULT_SCALE
        with open(atlas_path, 'r', encoding='utf-8') as f:
            for line in f:
                # Spine 4.x atlases can record the pack scale as a "scale:" line in the page header
                if line.startswith('scale:'):
                    parts = line.split(':')
                    if len(parts) > 1: 
                        scale = parts[1].strip()
                    break

        base_path = os.path.splitext(atlas_path)[0]
        input_path = None
        spine_ver = None

        skel_path = base_path + '.skel'
        if os.path.exists(skel_path):
            input_path = skel_path
            spine_ver = get_spine_version_from_skel(skel_path)

        if not spine_ver:
            json_path = base_path + '.json'
            if os.path.exists(json_path):
                input_path = json_path
                spine_ver = get_spine_version_from_json(json_path)

        if not input_path:
            return False, f"No matching .skel or .json file found for: {atlas_path}"

        if not spine_ver:
            return False, f"Could not determine the Spine version for: {atlas_path}"

        output_path = base_path + '.spine'
        rel_path = os.path.splitext(os.path.basename(atlas_path))[0]

        cmd = f'{SPINE_EXE} -i "{input_path}" -o "{output_path}" -s {scale} -r {rel_path} -u {spine_ver}'
        print(f"\n执行命令: {cmd}")

        result = subprocess.run(cmd, shell=True, check=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                encoding='utf-8',
                                errors='ignore') 

        if result.stdout:
            print(f"Command output:\n{result.stdout}")
        if result.stderr:
            print(f"Command stderr:\n{result.stderr}")

        return True, f"Processed successfully: {atlas_path} (scale: {scale}, version: {spine_ver})"
    except subprocess.CalledProcessError as e:
        error_msg = e.stderr.strip() if e.stderr else str(e)
        print(f"Command stderr:\n{error_msg}")
        fal.append(atlas_path)
        return False, f"Processing failed: {atlas_path}, error: {error_msg}"
    except Exception as e:
        fal.append(atlas_path)
        return False, f"Exception while processing: {atlas_path}, error: {str(e)}"


def batch_process_atlas(root_dir):
    atlas_files = list(glob.glob(os.path.join(root_dir, '**', '*.atlas'), recursive=True))
    total = len(atlas_files)
    print(f"找到 {total} 个.atlas文件待处理")

    success_count = 0
    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        futures = {executor.submit(process_single_atlas, f): f for f in atlas_files}

        for i, future in enumerate(as_completed(futures), 1):
            file = futures[future]
            try:
                success, message = future.result()
                if success:
                    success_count += 1
                print(f"[{i}/{total}] {message}")
            except Exception as e:
                print(f"[{i}/{total}] 处理异常: {file}, 错误: {str(e)}")

    print(f"\n处理完成: 成功 {success_count}/{total}")
    return success_count == total


def modify_json_files(directory):
    # rewrite skeleton.images to "./images" in every .json (the call in __main__ below is commented out)
    for root, dirs, files in os.walk(directory):
        for file in files:
            if file.endswith('.json'):
                file_path = os.path.join(root, file)
                try:
                    with open(file_path, 'r', encoding='utf-8') as f:
                        data = json.load(f)

                    if 'skeleton' not in data:
                        data['skeleton'] = {}

                    data['skeleton']['images'] = "./images"

                    with open(file_path, 'w', encoding='utf-8') as f:
                        json.dump(data, f, indent=4, ensure_ascii=False)
                    print(f"已处理: {file_path}")

                except Exception as e:
                    print(f"处理文件 {file_path} 时出错: {str(e)}")


if __name__ == '__main__':
    current_dir = input("Enter the directory to process: ").strip()
    if not os.path.isdir(current_dir):
        print("Error: the given path is not a valid directory")
    else:
        print(f"Processing directory: {current_dir}")
        #modify_json_files(current_dir)

        print("\n=== 开始处理Spine工程 ===")
        spine_success = batch_process_atlas(current_dir)

        print("\n=== 开始处理贴图文件 ===")
        mode = 'Premul'  # ['Normal', 'Premul', 'NonPremul']
        scale = True  # 重新计算帧的范围
        subfolder = True  # 查找子文件夹

        try:
            batch(current_dir, mode, scale, subfolder)
            print("贴图处理完成")
        except Exception as e:
            print(f"贴图处理失败: {str(e)}")

        print("\n所有处理完成")
        if not spine_success:
            print("警告: 部分Spine工程处理失败,请检查上方日志")
        print(fal)

For most Spine 3 files, though, the scale information often can't even be found in the json or skel, and for the skel in particular I'm completely unfamiliar with the structure of that binary file. Could anyone advise how to automatically batch-read the version and scale from Spine 3 json or skel files so they can be batch-restored?
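
For the version at least, cross-checking the 3.8 SkeletonBinary runtime source suggests it can be pulled straight out of the skel header: the file starts with two length-prefixed strings, hash and then version, where the length prefix is a 7-bit varint. Below is a minimal sketch assuming that 3.x layout (4.x skels start with an 8-byte hash instead, so it does not apply there); the helper names are illustrative only, so please verify against your own files.

def read_varint(f):
    # libgdx-style "optimized positive" varint: 7 bits per byte, high bit set means another byte follows
    result, shift = 0, 0
    while True:
        b = f.read(1)
        if not b:
            raise EOFError("unexpected end of skel file")
        result |= (b[0] & 0x7F) << shift
        if not (b[0] & 0x80):
            return result
        shift += 7


def read_skel_string(f):
    # SkeletonBinary string: varint byte count (0 = null, 1 = empty string), then count-1 UTF-8 bytes
    count = read_varint(f)
    if count == 0:
        return None
    return f.read(count - 1).decode('utf-8', errors='replace')


def get_spine3_version_from_skel(skel_path):
    with open(skel_path, 'rb') as f:
        read_skel_string(f)          # hash string
        return read_skel_string(f)   # version string, e.g. "3.8.99"

As for the scale, as far as I can tell from the runtime classes the export scale is not written into the skeleton data at all, which would match the experience of not being able to find it in the json or skel.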

This has me fuming: one batch restore tool reads out a scale of 17.9, but the value I work out by hand is 0.4, which is way off.

Here is some json/skel runtime-file conversion code I wrote for version 3.8. For this format you can cross-check the official runtime's SkeletonJson and SkeletonBinary classes against each other, but I only know the runtime format, not the editor's .spine format, so I'm not sure whether it helps.
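
For reference, the 3.8 runtime json header (the part get_spine_version_from_json above already reads) looks roughly like the following; the values are purely illustrative, and as far as I can tell there is no export-scale field anywhere in it:

{
    "skeleton": {
        "hash": "....",
        "spine": "3.8.99",
        "x": -110.25,
        "y": -2.5,
        "width": 220.5,
        "height": 370.75,
        "images": "./images/"
    }
}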