# Changelog:
# - Fixed an unclosed bracket in _build_test_command (line 745)
# - Added k8s preset support: run create_k8s_cluster.py before create.yml
# - Updated images in k8s presets: replaced the unavailable
#   ghcr.io/ansible-community/molecule-ubuntu-systemd:jammy with inecs/ansible-lab:ubuntu22-latest
# - Updated presets in the database via SQL
# - Updated files: k8s-single.yml, k8s-multi.yml, k8s-istio-full.yml
"""Migrate presets and dockerfiles from filesystem to database

Revision ID: 004
Revises: 003
Create Date: 2024-01-03 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import yaml
import json
from pathlib import Path
from datetime import datetime

# revision identifiers, used by Alembic.
revision = '004'
down_revision = '003'
branch_labels = None
depends_on = None

def upgrade():
    """Migrate presets and dockerfiles from the filesystem into the database.

    Idempotent: rows that already exist (matched by name) are skipped, and
    any per-file failure is logged and does not abort the migration.
    """
    import os

    connection = op.get_bind()

    # Presets now live in alembic/presets; resolve the path relative to this
    # migration file so the migration works regardless of the working directory.
    alembic_dir = Path(__file__).parent.parent
    presets_dir = alembic_dir / "presets"
    k8s_presets_dir = presets_dir / "k8s"

    # project_root is also needed later for the dockerfile fallback paths, so
    # bind it unconditionally (previously it was only bound inside the branch
    # below, which raised NameError when presets_dir existed but the
    # dockerfiles directory did not).
    project_root = Path(os.getenv("PROJECT_ROOT", "/workspace"))

    # Fall back to the legacy preset location for backward compatibility.
    if not presets_dir.exists():
        old_presets_dir = project_root / "molecule" / "presets"
        if old_presets_dir.exists():
            presets_dir = old_presets_dir
            k8s_presets_dir = presets_dir / "k8s"

    def import_preset(preset_file, category='main'):
        """Insert a single preset YAML file into the presets table.

        Returns True when a new row was inserted, False when the preset
        already exists in the database or the import failed.
        """
        try:
            with open(preset_file) as f:
                content = f.read()
            preset_data = yaml.safe_load(content) or {}

            # Extract an optional human-readable description from a
            # '#description:' comment inside the YAML file.
            description = None
            for line in content.split('\n'):
                if line.strip().startswith('#description:'):
                    description = line.split('#description:')[1].strip()
                    break

            # Skip presets that were already imported (idempotency).
            result = connection.execute(
                sa.text("SELECT id FROM presets WHERE name = :name"),
                {"name": preset_file.stem}
            )
            if result.fetchone():
                return False

            # Serialize dict/list values as JSON strings for the jsonb casts below.
            hosts_json = json.dumps(preset_data.get('hosts', []))
            images_json = json.dumps(preset_data.get('images', {}))
            systemd_defaults_json = json.dumps(preset_data.get('systemd_defaults', {}))
            kind_clusters_json = json.dumps(preset_data.get('kind_clusters', []))

            connection.execute(
                sa.text("""
                    INSERT INTO presets (name, category, description, content, docker_network, hosts, images, systemd_defaults, kind_clusters, created_at, updated_at)
                    VALUES (:name, :category, :description, :content, :docker_network, CAST(:hosts AS jsonb), CAST(:images AS jsonb), CAST(:systemd_defaults AS jsonb), CAST(:kind_clusters AS jsonb), :created_at, :updated_at)
                """),
                {
                    'name': preset_file.stem,
                    'category': category,
                    'description': description,
                    'content': content,
                    'docker_network': preset_data.get('docker_network'),
                    'hosts': hosts_json,
                    'images': images_json,
                    'systemd_defaults': systemd_defaults_json,
                    'kind_clusters': kind_clusters_json,
                    'created_at': datetime.utcnow(),
                    'updated_at': datetime.utcnow()
                }
            )
            return True
        except Exception as e:
            print(f"Ошибка при импорте preset {preset_file.name}: {e}")
            return False

    # Main presets from the root of the presets directory.
    if presets_dir.exists():
        for preset_file in presets_dir.glob("*.yml"):
            if preset_file.name == "deploy.yml":
                # deploy.yml is not a preset; skip it.
                continue
            import_preset(preset_file, category='main')

    # Presets from the examples subdirectory.
    examples_dir = presets_dir / "examples"
    if examples_dir.exists():
        for preset_file in examples_dir.glob("*.yml"):
            import_preset(preset_file, category='main')

    # K8s presets — same import logic, different category. (The original
    # duplicated the whole import_preset body here verbatim.)
    if k8s_presets_dir.exists():
        for preset_file in k8s_presets_dir.glob("*.yml"):
            import_preset(preset_file, category='k8s')

    # ========== MIGRATE DOCKERFILES ==========
    # Dockerfiles now live in alembic/dockerfiles, resolved relative to this
    # migration file.
    dockerfiles_dir = alembic_dir / "dockerfiles"

    # If not found there, try alternative locations.
    if not dockerfiles_dir.exists():
        alt_paths = [
            project_root / "app" / "alembic" / "dockerfiles",
            Path("/app/app/alembic/dockerfiles"),
        ]
        for alt_path in alt_paths:
            if alt_path.exists():
                dockerfiles_dir = alt_path
                break

    if dockerfiles_dir.exists():
        for dockerfile_path in dockerfiles_dir.rglob("Dockerfile*"):
            if not dockerfile_path.is_file():
                continue

            try:
                # Derive the name from the path (e.g. ubuntu22/Dockerfile -> ubuntu22);
                # for top-level files fall back to the file stem.
                relative_path = dockerfile_path.relative_to(dockerfiles_dir)
                name = str(relative_path.parent) if relative_path.parent != Path('.') else relative_path.stem

                # Skip special-purpose dockerfiles that are not user images.
                if name in ['ansible-controller', 'k8s', 'k8s-portforward']:
                    continue

                # Skip dockerfiles that were already imported (idempotency).
                result = connection.execute(
                    sa.text("SELECT id FROM dockerfiles WHERE name = :name"),
                    {"name": name}
                )
                if result.fetchone():
                    continue

                content = dockerfile_path.read_text(encoding='utf-8')

                # Determine the base image from the first FROM instruction.
                base_image = None
                for line in content.split('\n'):
                    if line.strip().startswith('FROM'):
                        base_image = line.strip().replace('FROM', '').strip().split()[0]
                        break

                # Default platforms: linux/amd64 (x86_64), linux/386 (x86)
                # and linux/arm64 (macOS M1).
                default_platforms = ["linux/amd64", "linux/386", "linux/arm64"]

                connection.execute(
                    sa.text("""
                        INSERT INTO dockerfiles (name, description, content, base_image, tags, platforms, status, created_at, updated_at)
                        VALUES (:name, :description, :content, :base_image, :tags, :platforms, :status, :created_at, :updated_at)
                    """),
                    {
                        'name': name,
                        'description': f'Dockerfile for {name}',
                        'content': content,
                        'base_image': base_image,
                        'tags': None,
                        'platforms': default_platforms,
                        'status': 'active',
                        'created_at': datetime.utcnow(),
                        'updated_at': datetime.utcnow()
                    }
                )
            except Exception as e:
                print(f"Ошибка при импорте dockerfile {dockerfile_path}: {e}")

def downgrade():
    """No-op downgrade.

    Imported presets and dockerfiles are intentionally kept in the database
    on rollback; nothing is deleted for safety.
    """
    pass