Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions crowdin.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,5 @@ files:
translation: /docs/%locale%/**/%file_name%.md
- source: /docs/**/zh-CN.json
translation: /docs/**/%locale%.json
- source: /pkg-translations/zh.json
translation: /pkg-translations/%locale%.json
16 changes: 16 additions & 0 deletions pkg-translations/zh.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"koishi-plugin-booru-konachan": "最好的涩图插件! - Konachan 图源支持",
"koishi-plugin-booru-safebooru": "最好的涩图插件! - safebooru 图源支持",
"koishi-plugin-booru-e621": "最好的涩图插件! - e621/e926 图源支持",
"koishi-plugin-booru-gelbooru": "最好的涩图插件! - Gelbooru 图源支持",
"koishi-plugin-booru-lolicon": "最好的涩图插件! - Lolicon 图源支持",
"koishi-plugin-booru-moehu": "最好的涩图插件! - moehu 图源支持",
"koishi-plugin-booru-yande": "最好的涩图插件! - Yande 图源支持",
"koishi-plugin-booru": "最好的涩图插件!",
"koishi-plugin-booru-derpibooru": "最好的涩图插件! - Derpibooru 图源支持",
"koishi-plugin-booru-pixiv": "最好的涩图插件! - Pixiv 图源支持",
"koishi-plugin-booru-sankaku": "最好的涩图插件! - sankaku 图源支持",
"koishi-plugin-booru-danbooru": "最好的涩图插件! - Danbooru 图源支持",
"koishi-plugin-booru-lolibooru": "最好的涩图插件! - Lolibooru 图源支持",
"koishi-plugin-booru-local": "最好的涩图插件! - 本地图源支持"
}
58 changes: 58 additions & 0 deletions scripts/prepare-crowdin-upload.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
from json import dump, load
from pathlib import Path
from typing import Dict, List

def main():
    """
    Prepare files for Crowdin upload.

    Walks the 'packages' directory, reads each package's `package.json`,
    and extracts the `koishi.description` field, which should be a dict
    mapping language codes to translated descriptions,
    e.g., {"en": "English description", "zh": "Chinese description"}.

    The extracted data is written to `pkg-translations/<locale>.json`,
    each structured as a dictionary mapping package name to translation,
    e.g., `pkg-translations/en.json` : {"package-name": "English description"}.
    """

    root = Path(__file__).parent.parent / "packages"
    out_dir = root.parent / "pkg-translations"  # single source of truth for the output path
    translations: Dict[str, Dict[str, str]] = {}  # locale -> { package-name -> translation }

    # Create output directory if it doesn't exist
    out_dir.mkdir(exist_ok=True)

    # Sort for a deterministic iteration order so the generated JSON files
    # are stable across runs (smaller VCS / Crowdin diffs).
    for package in sorted(root.iterdir()):
        if not package.is_dir():
            continue
        pkg_json_path = package / "package.json"
        if not pkg_json_path.exists():
            continue

        with pkg_json_path.open("r", encoding="utf-8") as f:
            pkg_data = load(f)

        pkg_name: str = pkg_data.get("name", "")
        koishi_data: Dict = pkg_data.get("koishi", {})
        description_data = koishi_data.get("description", {})

        # A plain-string description has no per-locale mapping and is not
        # translatable; an unnamed package would end up under the "" key.
        if not pkg_name or not isinstance(description_data, dict):
            continue

        for lang, desc in description_data.items():
            translations.setdefault(lang, {})[pkg_name] = desc

    for locale, data in translations.items():
        output_path = out_dir / f"{locale}.json"
        with output_path.open("w", encoding="utf-8") as f:
            dump(data, f, ensure_ascii=False, indent=2)

    print(f"Translation data has been written to {out_dir}/")

if __name__ == "__main__":
    main()
96 changes: 96 additions & 0 deletions scripts/sync-crowdin-translation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
from json import dump, load
from pathlib import Path
from typing import Dict, List

def main():
    """
    Sync translations from Crowdin back to package.json files.

    Reads the per-locale JSON files from the 'pkg-translations' directory
    (each structured as `pkg-translations/<locale>.json`, a dictionary
    mapping package names to translated descriptions) and merges them back
    into the 'description' mapping under the 'koishi' key of each
    package's package.json.

    e.g., `pkg-translations/en.json` : {"package-name": "English description"}
    gets merged into `packages/package-name/package.json` under koishi.description.en
    """

    root = Path(__file__).parent.parent
    packages_dir = root / "packages"
    translations_dir = root / "pkg-translations"

    if not translations_dir.exists():
        print(f"Translation directory {translations_dir} does not exist")
        return

    # Read all translation files
    all_translations: Dict[str, Dict[str, str]] = {}  # locale -> { package-name -> translation }

    for translation_file in translations_dir.glob("*.json"):
        locale = translation_file.stem
        try:
            with translation_file.open("r", encoding="utf-8") as f:
                translations = load(f)
        except Exception as e:
            # Best-effort: a malformed file must not abort the whole sync.
            print(f"Error loading translation file {translation_file}: {e}")
            continue
        if not isinstance(translations, dict):
            # Top-level value must be an object, or len()/membership below lie.
            print(f"Skipping {translation_file}: expected a JSON object")
            continue
        all_translations[locale] = translations
        print(f"Loaded {len(translations)} translations for locale '{locale}'")

    if not all_translations:
        print("No translation files found")
        return

    # Update package.json files
    updated_packages = 0

    for package_dir in packages_dir.iterdir():
        if not package_dir.is_dir():
            continue

        pkg_json_path = package_dir / "package.json"
        if not pkg_json_path.exists():
            continue

        try:
            with pkg_json_path.open("r", encoding="utf-8") as f:
                pkg_data = load(f)
        except Exception as e:
            print(f"Error reading {pkg_json_path}: {e}")
            continue

        pkg_name = pkg_data.get("name", "")
        if not pkg_name:
            continue

        # Initialize the koishi section if missing; refuse to clobber an
        # unexpected non-object value (would raise TypeError on assignment).
        koishi_data = pkg_data.setdefault("koishi", {})
        if not isinstance(koishi_data, dict):
            print(f"Skipping {pkg_name}: 'koishi' field is not an object")
            continue

        # koishi.description may legally be a plain string; there is no
        # per-locale mapping to merge into then. Mirrors the isinstance
        # guard in prepare-crowdin-upload.py.
        description = koishi_data.setdefault("description", {})
        if not isinstance(description, dict):
            print(f"Skipping {pkg_name}: 'koishi.description' is not an object")
            continue

        # Update descriptions with translations
        updated = False
        for locale, translations in all_translations.items():
            if pkg_name in translations:
                description[locale] = translations[pkg_name]
                updated = True

        # Write back to package.json only if something changed
        if updated:
            try:
                with pkg_json_path.open("w", encoding="utf-8") as f:
                    dump(pkg_data, f, ensure_ascii=False, indent=2)
                updated_packages += 1
                print(f"Updated package: {pkg_name}")
            except Exception as e:
                print(f"Error writing {pkg_json_path}: {e}")

    print(f"Successfully updated {updated_packages} packages")

if __name__ == "__main__":
    main()
Loading