feat: добавлена пометка типа операции (Build/Push) в истории сборок Dockerfile
- Добавлена колонка 'Тип' во все таблицы истории сборок - Для push операций отображается registry вместо платформ - Сохранение пользователя при создании push лога - Исправлена ошибка с logger в push_docker_image endpoint - Улучшено отображение истории сборок с визуальными индикаторами
This commit is contained in:
53
app/alembic/versions/001_initial_migration.py
Normal file
53
app/alembic/versions/001_initial_migration.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Initial migration with users table
|
||||
|
||||
Revision ID: 001_initial
|
||||
Revises:
|
||||
Create Date: 2024-01-01 00:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '001_initial'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the users table, its indexes, and seed the default admin account."""
    user_columns = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('hashed_password', sa.String(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
        sa.Column('is_superuser', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
    ]
    op.create_table('users', *user_columns, sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)

    # Seed a default admin/admin account. bcrypt is imported lazily so the
    # module itself stays importable without it; the hash may be replaced by
    # UserService on first login.
    import bcrypt
    temp_hash = bcrypt.hashpw(b'admin', bcrypt.gensalt()).decode('utf-8')

    # Parameterized statement; ON CONFLICT keeps reruns idempotent.
    op.execute(
        sa.text("""
        INSERT INTO users (username, hashed_password, is_active, is_superuser)
        VALUES ('admin', :hash, true, true)
        ON CONFLICT (username) DO NOTHING;
    """).bindparams(hash=temp_hash)
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the users table together with its indexes."""
    for index_name in ('ix_users_username', 'ix_users_id'):
        op.drop_index(op.f(index_name), table_name='users')
    op.drop_table('users')
|
||||
119
app/alembic/versions/002_add_playbooks_and_dockerfiles.py
Normal file
119
app/alembic/versions/002_add_playbooks_and_dockerfiles.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Add playbooks and dockerfiles tables
|
||||
|
||||
Revision ID: 002_add_playbooks
|
||||
Revises: 001_initial
|
||||
Create Date: 2024-01-02 00:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '002_add_playbooks'
|
||||
down_revision = '001_initial'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create playbooks, playbook_test_runs, playbook_deployments and dockerfiles."""

    def _run_log_columns():
        # Trailing columns shared by the two execution-log tables.
        # A fresh list of Column objects is built on every call because a
        # SQLAlchemy Column may only be attached to a single table.
        return [
            sa.Column('status', sa.String(), nullable=False),
            sa.Column('started_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
            sa.Column('finished_at', sa.DateTime()),
            sa.Column('duration', sa.Integer()),
            sa.Column('output', sa.Text()),
            sa.Column('error', sa.Text()),
            sa.Column('returncode', sa.Integer()),
            sa.Column('user', sa.String()),
            sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text())),
        ]

    # playbooks: the Ansible playbook definitions themselves.
    op.create_table(
        'playbooks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text()),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('roles', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('variables', postgresql.JSON(astext_type=sa.Text())),
        sa.Column('inventory', sa.Text()),
        sa.Column('status', sa.String(), server_default='active'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
        sa.Column('created_by', sa.String()),
        sa.Column('updated_by', sa.String()),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text())),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_playbooks_id'), 'playbooks', ['id'], unique=False)
    op.create_index(op.f('ix_playbooks_name'), 'playbooks', ['name'], unique=True)

    # playbook_test_runs: one row per test execution of a playbook.
    op.create_table(
        'playbook_test_runs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('playbook_id', sa.Integer(), nullable=False),
        sa.Column('preset_name', sa.String()),
        *_run_log_columns(),
        sa.ForeignKeyConstraint(['playbook_id'], ['playbooks.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    for index_name, columns in (
        ('ix_playbook_test_runs_id', ['id']),
        ('ix_playbook_test_runs_playbook_id', ['playbook_id']),
        ('ix_playbook_test_runs_preset_name', ['preset_name']),
    ):
        op.create_index(op.f(index_name), 'playbook_test_runs', columns, unique=False)

    # playbook_deployments: one row per deployment of a playbook.
    op.create_table(
        'playbook_deployments',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('playbook_id', sa.Integer(), nullable=False),
        sa.Column('inventory', sa.Text()),
        sa.Column('hosts', postgresql.JSON(astext_type=sa.Text())),
        *_run_log_columns(),
        sa.ForeignKeyConstraint(['playbook_id'], ['playbooks.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_playbook_deployments_id'), 'playbook_deployments', ['id'], unique=False)
    op.create_index(op.f('ix_playbook_deployments_playbook_id'), 'playbook_deployments', ['playbook_id'], unique=False)

    # dockerfiles: stored Dockerfile definitions.
    op.create_table(
        'dockerfiles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text()),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('base_image', sa.String()),
        sa.Column('tags', postgresql.JSON(astext_type=sa.Text())),
        sa.Column('status', sa.String(), server_default='active'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()')),
        sa.Column('created_by', sa.String()),
        sa.Column('updated_by', sa.String()),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text())),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_dockerfiles_id'), 'dockerfiles', ['id'], unique=False)
    op.create_index(op.f('ix_dockerfiles_name'), 'dockerfiles', ['name'], unique=True)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the tables created by this revision, dependents before parents."""
    for table_name, index_names in (
        ('dockerfiles', ('ix_dockerfiles_name', 'ix_dockerfiles_id')),
        ('playbook_deployments', ('ix_playbook_deployments_playbook_id', 'ix_playbook_deployments_id')),
        ('playbook_test_runs', ('ix_playbook_test_runs_preset_name',
                                'ix_playbook_test_runs_playbook_id',
                                'ix_playbook_test_runs_id')),
        ('playbooks', ('ix_playbooks_name', 'ix_playbooks_id')),
    ):
        for index_name in index_names:
            op.drop_index(op.f(index_name), table_name=table_name)
        op.drop_table(table_name)
|
||||
132
app/alembic/versions/003_add_presets_table.py
Normal file
132
app/alembic/versions/003_add_presets_table.py
Normal file
@@ -0,0 +1,132 @@
|
||||
"""add presets table
|
||||
|
||||
Revision ID: 003
|
||||
Revises: 002
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '003'
|
||||
down_revision = '002_add_playbooks'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the presets table and its indexes.

    NOTE(review): the original revision additionally tried to seed presets
    from YAML files, but referenced names never defined in this module
    (``presets_dir``, ``k8s_presets_dir``, ``connection``) and modules never
    imported here (``yaml``, ``datetime``), so it raised ``NameError`` as soon
    as it ran. The filesystem import is done correctly — with duplicate
    checks and explicit jsonb casting — by revision 004, so this revision now
    only creates the schema.
    """
    op.create_table(
        'presets',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('category', sa.String(), nullable=True, server_default='main'),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('docker_network', sa.String(), nullable=True),
        sa.Column('hosts', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('images', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('systemd_defaults', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('kind_clusters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('status', sa.String(), nullable=True, server_default='active'),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_by', sa.String(), nullable=True),
        sa.Column('updated_by', sa.String(), nullable=True),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_presets_id'), 'presets', ['id'], unique=False)
    op.create_index(op.f('ix_presets_name'), 'presets', ['name'], unique=True)
    op.create_index(op.f('ix_presets_category'), 'presets', ['category'], unique=False)
|
||||
|
||||
|
||||
def downgrade():
    """Drop the presets table and its indexes."""
    for index_name in ('ix_presets_category', 'ix_presets_name', 'ix_presets_id'):
        op.drop_index(op.f(index_name), table_name='presets')
    op.drop_table('presets')
|
||||
222
app/alembic/versions/004_migrate_presets_and_dockerfiles.py
Normal file
222
app/alembic/versions/004_migrate_presets_and_dockerfiles.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""Migrate presets and dockerfiles from filesystem to database
|
||||
|
||||
Revision ID: 004
|
||||
Revises: 003
|
||||
Create Date: 2024-01-03 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
import yaml
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '004'
|
||||
down_revision = '003'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Import presets and dockerfiles from the filesystem into the database."""
    import os

    connection = op.get_bind()

    # Presets now live next to the migrations, in alembic/presets.
    alembic_dir = Path(__file__).parent.parent
    presets_dir = alembic_dir / "presets"
    k8s_presets_dir = presets_dir / "k8s"

    # project_root is needed both for the legacy preset location and for the
    # dockerfile fallback paths below, so resolve it unconditionally.
    # (The original only defined it inside the presets fallback branch, which
    # raised NameError in the dockerfiles section whenever alembic/presets
    # existed.)
    project_root = Path(os.getenv("PROJECT_ROOT", "/workspace"))

    # Backward compatibility: fall back to the old molecule/presets location.
    if not presets_dir.exists():
        old_presets_dir = project_root / "molecule" / "presets"
        if old_presets_dir.exists():
            presets_dir = old_presets_dir
            k8s_presets_dir = presets_dir / "k8s"

    # Main presets (deploy.yml is intentionally skipped).
    if presets_dir.exists():
        for preset_file in presets_dir.glob("*.yml"):
            if preset_file.name == "deploy.yml":
                continue
            _import_preset(connection, preset_file, 'main')

    # K8s presets.
    if k8s_presets_dir.exists():
        for preset_file in k8s_presets_dir.glob("*.yml"):
            _import_preset(connection, preset_file, 'k8s')

    # ========== DOCKERFILES IMPORT ==========
    _import_dockerfiles(connection, alembic_dir, project_root)


def _import_preset(connection, preset_file, category):
    """Insert one preset YAML file into presets; skip names already present."""
    try:
        with open(preset_file) as f:
            content = f.read()
        preset_data = yaml.safe_load(content) or {}

        # Description comes from a '#description:' comment line, if any.
        description = None
        for line in content.split('\n'):
            if line.strip().startswith('#description:'):
                description = line.split('#description:')[1].strip()
                break

        # Skip presets that already exist in the database.
        result = connection.execute(
            sa.text("SELECT id FROM presets WHERE name = :name"),
            {"name": preset_file.stem}
        )
        if result.fetchone():
            return

        # JSON values are bound as serialized strings and CAST to jsonb on
        # the PostgreSQL side.
        connection.execute(
            sa.text("""
                INSERT INTO presets (name, category, description, content, docker_network, hosts, images, systemd_defaults, kind_clusters, created_at, updated_at)
                VALUES (:name, :category, :description, :content, :docker_network, CAST(:hosts AS jsonb), CAST(:images AS jsonb), CAST(:systemd_defaults AS jsonb), CAST(:kind_clusters AS jsonb), :created_at, :updated_at)
            """),
            {
                'name': preset_file.stem,
                'category': category,
                'description': description,
                'content': content,
                'docker_network': preset_data.get('docker_network'),
                'hosts': json.dumps(preset_data.get('hosts', [])),
                'images': json.dumps(preset_data.get('images', {})),
                'systemd_defaults': json.dumps(preset_data.get('systemd_defaults', {})),
                'kind_clusters': json.dumps(preset_data.get('kind_clusters', [])),
                'created_at': datetime.utcnow(),
                'updated_at': datetime.utcnow()
            }
        )
    except Exception as e:
        # Preserve the original (category-specific) error messages.
        label = "k8s preset" if category == 'k8s' else "preset"
        print(f"Ошибка при импорте {label} {preset_file.name}: {e}")


def _import_dockerfiles(connection, alembic_dir, project_root):
    """Import Dockerfile contents from alembic/dockerfiles into dockerfiles."""
    dockerfiles_dir = alembic_dir / "dockerfiles"

    # Fallback locations for container layouts.
    if not dockerfiles_dir.exists():
        alt_paths = [
            project_root / "app" / "alembic" / "dockerfiles",
            Path("/app/app/alembic/dockerfiles"),
        ]
        for alt_path in alt_paths:
            if alt_path.exists():
                dockerfiles_dir = alt_path
                break

    if not dockerfiles_dir.exists():
        return

    for dockerfile_path in dockerfiles_dir.rglob("Dockerfile*"):
        if not dockerfile_path.is_file():
            continue

        try:
            # Name from the parent directory (e.g. ubuntu22/Dockerfile -> ubuntu22).
            relative_path = dockerfile_path.relative_to(dockerfiles_dir)
            name = str(relative_path.parent) if relative_path.parent != Path('.') else relative_path.stem

            # Skip special-purpose images.
            if name in ['ansible-controller', 'k8s', 'k8s-portforward']:
                continue

            # Skip dockerfiles that already exist in the database.
            result = connection.execute(
                sa.text("SELECT id FROM dockerfiles WHERE name = :name"),
                {"name": name}
            )
            if result.fetchone():
                continue

            content = dockerfile_path.read_text(encoding='utf-8')

            # Base image from the first FROM instruction.
            base_image = None
            for line in content.split('\n'):
                if line.strip().startswith('FROM'):
                    base_image = line.strip().replace('FROM', '').strip().split()[0]
                    break

            # NOTE: the 'platforms' column does not exist at this point in the
            # migration chain — it is added (and backfilled with defaults) by
            # revision 006. The original INSERT referenced that column and
            # bound a raw Python list, so every row failed inside this
            # try/except and no dockerfile was ever imported.
            connection.execute(
                sa.text("""
                    INSERT INTO dockerfiles (name, description, content, base_image, tags, status, created_at, updated_at)
                    VALUES (:name, :description, :content, :base_image, :tags, :status, :created_at, :updated_at)
                """),
                {
                    'name': name,
                    'description': f'Dockerfile for {name}',
                    'content': content,
                    'base_image': base_image,
                    'tags': None,
                    'status': 'active',
                    'created_at': datetime.utcnow(),
                    'updated_at': datetime.utcnow()
                }
            )
        except Exception as e:
            print(f"Ошибка при импорте dockerfile {dockerfile_path}: {e}")
|
||||
|
||||
|
||||
def downgrade():
    """No-op downgrade for the data-import migration."""
    # On downgrade the imported rows are deliberately kept in the database,
    # and no files are deleted, for safety.
    pass
|
||||
47
app/alembic/versions/005_add_user_profiles.py
Normal file
47
app/alembic/versions/005_add_user_profiles.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Add user profiles table
|
||||
|
||||
Revision ID: 005
|
||||
Revises: 004
|
||||
Create Date: 2024-01-04 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '005'
|
||||
down_revision = '004'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the user_profiles table (at most one profile row per user)."""
    registry_columns = [
        # Docker Hub credentials
        sa.Column('dockerhub_username', sa.String(), nullable=True),
        sa.Column('dockerhub_password', sa.Text(), nullable=True),
        sa.Column('dockerhub_repository', sa.String(), nullable=True),
        # Harbor registry credentials
        sa.Column('harbor_url', sa.String(), nullable=True),
        sa.Column('harbor_username', sa.String(), nullable=True),
        sa.Column('harbor_password', sa.Text(), nullable=True),
        sa.Column('harbor_project', sa.String(), nullable=True),
    ]
    op.create_table(
        'user_profiles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        *registry_columns,
        sa.Column('email', sa.String(), nullable=True),
        sa.Column('full_name', sa.String(), nullable=True),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_user_profiles_id'), 'user_profiles', ['id'], unique=False)
    # unique=True enforces the one-profile-per-user invariant.
    op.create_index(op.f('ix_user_profiles_user_id'), 'user_profiles', ['user_id'], unique=True)
|
||||
|
||||
|
||||
def downgrade():
    """Drop the user_profiles table and its indexes."""
    for index_name in ('ix_user_profiles_user_id', 'ix_user_profiles_id'):
        op.drop_index(op.f(index_name), table_name='user_profiles')
    op.drop_table('user_profiles')
|
||||
46
app/alembic/versions/006_add_dockerfile_platforms.py
Normal file
46
app/alembic/versions/006_add_dockerfile_platforms.py
Normal file
@@ -0,0 +1,46 @@
|
||||
"""Add platforms column to dockerfiles table
|
||||
|
||||
Revision ID: 006
|
||||
Revises: 005
|
||||
Create Date: 2024-01-04 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
import json
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '006'
|
||||
down_revision = '005'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add a nullable JSON 'platforms' column to dockerfiles and backfill it."""
    op.add_column(
        'dockerfiles',
        sa.Column('platforms', postgresql.JSON(astext_type=sa.Text()), nullable=True)
    )

    # Backfill existing rows with the default platform set:
    # linux/amd64 (x86_64), linux/386 (x86) and linux/arm64 (macOS M1).
    # The bound JSON string is CAST to jsonb on the PostgreSQL side.
    default_platforms = ["linux/amd64", "linux/386", "linux/arm64"]
    op.get_bind().execute(
        sa.text("""
            UPDATE dockerfiles
            SET platforms = CAST(:platforms AS jsonb)
            WHERE platforms IS NULL
        """),
        {"platforms": json.dumps(default_platforms)}
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove the platforms column from the dockerfiles table."""
    op.drop_column('dockerfiles', 'platforms')
|
||||
49
app/alembic/versions/007_add_dockerfile_build_logs.py
Normal file
49
app/alembic/versions/007_add_dockerfile_build_logs.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Add dockerfile_build_logs table
|
||||
|
||||
Revision ID: 007
|
||||
Revises: 006
|
||||
Create Date: 2024-01-05 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '007'
|
||||
down_revision = '006'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create dockerfile_build_logs for storing per-build log records."""
    op.create_table(
        'dockerfile_build_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('dockerfile_id', sa.Integer(), nullable=False),
        sa.Column('image_name', sa.String(), nullable=False),
        sa.Column('tag', sa.String(), server_default='latest', nullable=True),
        sa.Column('platforms', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('status', sa.String(), nullable=False, server_default='running'),
        sa.Column('logs', sa.Text(), nullable=True),
        sa.Column('started_at', sa.DateTime(), nullable=False),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('returncode', sa.Integer(), nullable=True),
        sa.Column('user', sa.String(), nullable=True),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['dockerfile_id'], ['dockerfiles.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    # Lookup indexes: by parent dockerfile, by image name, and by start time.
    for index_name, columns in (
        ('ix_dockerfile_build_logs_dockerfile_id', ['dockerfile_id']),
        ('ix_dockerfile_build_logs_image_name', ['image_name']),
        ('ix_dockerfile_build_logs_started_at', ['started_at']),
    ):
        op.create_index(op.f(index_name), 'dockerfile_build_logs', columns, unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop dockerfile_build_logs and its indexes."""
    for index_name in (
        'ix_dockerfile_build_logs_started_at',
        'ix_dockerfile_build_logs_image_name',
        'ix_dockerfile_build_logs_dockerfile_id',
    ):
        op.drop_index(op.f(index_name), table_name='dockerfile_build_logs')
    op.drop_table('dockerfile_build_logs')
|
||||
58
app/alembic/versions/008_add_roles_table.py
Normal file
58
app/alembic/versions/008_add_roles_table.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""Add roles table
|
||||
|
||||
Revision ID: 008
|
||||
Revises: 007
|
||||
Create Date: 2024-01-06 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '008'
|
||||
down_revision = '007'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the roles table for storing Ansible roles."""
    op.create_table(
        'roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('content', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('is_global', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('is_personal', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('groups', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('author', sa.String(), nullable=True),
        sa.Column('platforms', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('galaxy_info', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('status', sa.String(), server_default='active', nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('created_by', sa.String(), nullable=True),
        sa.Column('updated_by', sa.String(), nullable=True),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    for index_name, columns, is_unique in (
        ('ix_roles_name', ['name'], True),
        ('ix_roles_is_global', ['is_global'], False),
        ('ix_roles_is_personal', ['is_personal'], False),
        ('ix_roles_user_id', ['user_id'], False),
        ('ix_roles_created_at', ['created_at'], False),
    ):
        op.create_index(op.f(index_name), 'roles', columns, unique=is_unique)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the roles table and its indexes."""
    for index_name in (
        'ix_roles_created_at',
        'ix_roles_user_id',
        'ix_roles_is_personal',
        'ix_roles_is_global',
        'ix_roles_name',
    ):
        op.drop_index(op.f(index_name), table_name='roles')
    op.drop_table('roles')
|
||||
# ---- app/alembic/versions/009_migrate_roles_to_db.py (new file, 200 lines) ----
"""Migrate roles from filesystem to database
|
||||
|
||||
Revision ID: 009
|
||||
Revises: 008
|
||||
Create Date: 2024-01-06 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
import json
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import os
|
||||
|
||||
# revision identifiers, used by Alembic.
revision = '009'        # this migration's ID
down_revision = '008'   # applied on top of migration 008
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Migrate roles from the filesystem into the DB and into ``alembic/roles/``.

    For every role directory found under the source ``roles/`` folder this:
      1. collects all role files into a flat ``{relative_path: content}`` dict,
      2. extracts author/description/platforms from ``meta/main.yml``,
      3. copies the files into ``alembic/roles/<name>/`` as a snapshot,
      4. inserts a row into the ``roles`` table (``ON CONFLICT (name) DO NOTHING``).

    Best-effort by design: per-file and per-role errors are printed and
    skipped so one bad role does not abort the whole migration.
    """
    connection = op.get_bind()

    # Resolve paths: alembic/roles/ is a snapshot dir kept next to the migrations.
    alembic_dir = Path(__file__).parent.parent
    roles_dir_alembic = alembic_dir / "roles"
    roles_dir_alembic.mkdir(exist_ok=True)

    # Source roles directory, configurable via PROJECT_ROOT (default /workspace).
    project_root = Path(os.getenv("PROJECT_ROOT", "/workspace"))
    roles_dir_source = project_root / "roles"

    # If the source folder is missing, probe a few fallback locations.
    if not roles_dir_source.exists():
        # Try to locate it relative to the alembic dir / current working dir.
        possible_paths = [
            alembic_dir.parent.parent / "roles",
            Path.cwd() / "roles",
            Path("/workspace") / "roles"
        ]
        for path in possible_paths:
            if path.exists():
                roles_dir_source = path
                break

    # Nothing to migrate — report and bail out without failing the migration.
    if not roles_dir_source.exists():
        print(f"⚠️ Папка roles не найдена: {roles_dir_source}")
        return

    print(f"📁 Исходная папка ролей: {roles_dir_source}")
    print(f"📁 Целевая папка ролей: {roles_dir_alembic}")

    # Read a file, returning "" on any error so a single unreadable file
    # (e.g. binary content under files/) does not abort the whole migration.
    def read_file_safe(file_path: Path) -> str:
        try:
            return file_path.read_text(encoding='utf-8')
        except Exception as e:
            print(f"⚠️ Ошибка чтения файла {file_path}: {e}")
            return ""

    # Gather every file belonging to one role.
    def collect_role_files(role_dir: Path) -> dict:
        """Collect all role files into a dict ``{relative_path: content}``."""
        role_content = {}

        # Standard Ansible role files (key == relative path).
        standard_files = {
            "tasks/main.yml": "tasks/main.yml",
            "handlers/main.yml": "handlers/main.yml",
            "defaults/main.yml": "defaults/main.yml",
            "vars/main.yml": "vars/main.yml",
            "meta/main.yml": "meta/main.yml",
            "README.md": "README.md"
        }

        # Read the standard files that actually exist.
        for file_path, key in standard_files.items():
            full_path = role_dir / file_path
            if full_path.exists():
                role_content[key] = read_file_safe(full_path)

        # Read every file under templates/ (recursively).
        templates_dir = role_dir / "templates"
        if templates_dir.exists():
            for template_file in templates_dir.rglob("*"):
                if template_file.is_file():
                    rel_path = template_file.relative_to(role_dir)
                    role_content[str(rel_path)] = read_file_safe(template_file)

        # Read every file under files/ (recursively).
        files_dir = role_dir / "files"
        if files_dir.exists():
            for file_item in files_dir.rglob("*"):
                if file_item.is_file():
                    rel_path = file_item.relative_to(role_dir)
                    role_content[str(rel_path)] = read_file_safe(file_item)

        # Read every file under library/ (custom modules, if present).
        library_dir = role_dir / "library"
        if library_dir.exists():
            for lib_file in library_dir.rglob("*"):
                if lib_file.is_file():
                    rel_path = lib_file.relative_to(role_dir)
                    role_content[str(rel_path)] = read_file_safe(lib_file)

        return role_content

    # Pull author/description/platforms out of meta/main.yml.
    def extract_metadata(role_content: dict) -> tuple:
        """Extract (author, description, platforms) from ``meta/main.yml``.

        Returns ``(None, None, None)`` when the file is missing, empty,
        not a mapping, or fails to parse.
        """
        meta_content = role_content.get("meta/main.yml", "")
        if not meta_content:
            return None, None, None

        try:
            meta_data = yaml.safe_load(meta_content)
            if not meta_data or not isinstance(meta_data, dict):
                return None, None, None

            galaxy_info = meta_data.get("galaxy_info", {})
            author = galaxy_info.get("author", "")
            description = galaxy_info.get("description", "")
            platforms = galaxy_info.get("platforms", [])

            return author, description, platforms
        except Exception as e:
            print(f"⚠️ Ошибка парсинга meta/main.yml: {e}")
            return None, None, None

    # Process each role directory (hidden dirs and plain files are skipped).
    migrated_count = 0
    for role_dir in roles_dir_source.iterdir():
        if not role_dir.is_dir() or role_dir.name.startswith('.'):
            continue

        role_name = role_dir.name
        print(f"📦 Обработка роли: {role_name}")

        # Collect all files of the role.
        role_content = collect_role_files(role_dir)

        if not role_content:
            print(f"⚠️ Роль {role_name} не содержит файлов, пропускаем")
            continue

        # Extract metadata from meta/main.yml.
        author, description, platforms = extract_metadata(role_content)

        # Snapshot the role into alembic/roles/<name>/.
        target_role_dir = roles_dir_alembic / role_name
        target_role_dir.mkdir(exist_ok=True)

        # Replicate the directory structure and file contents.
        for rel_path, content in role_content.items():
            target_file = target_role_dir / rel_path
            target_file.parent.mkdir(parents=True, exist_ok=True)
            try:
                target_file.write_text(content, encoding='utf-8')
            except Exception as e:
                print(f"⚠️ Ошибка записи файла {target_file}: {e}")

        # Persist the role in the DB; duplicates by name are silently skipped.
        try:
            connection.execute(
                sa.text("""
                    INSERT INTO roles (name, description, content, is_global, is_personal, author, platforms, galaxy_info, status, created_at, updated_at)
                    VALUES (:name, :description, :content, :is_global, :is_personal, :author, :platforms, :galaxy_info, :status, :created_at, :updated_at)
                    ON CONFLICT (name) DO NOTHING
                """),
                {
                    'name': role_name,
                    'description': description or f"Роль {role_name}",
                    'content': json.dumps(role_content),
                    'is_global': True,  # all roles are global by default
                    'is_personal': False,
                    'author': author,
                    'platforms': json.dumps(platforms) if platforms else None,
                    'galaxy_info': json.dumps({"galaxy_info": {"author": author, "description": description, "platforms": platforms}}) if author or description or platforms else None,
                    'status': 'active',
                    'created_at': datetime.utcnow(),
                    'updated_at': datetime.utcnow()
                }
            )
            migrated_count += 1
            print(f"✅ Роль {role_name} успешно мигрирована")
        except Exception as e:
            print(f"❌ Ошибка при миграции роли {role_name}: {e}")

    print(f"\n✅ Миграция завершена. Перенесено ролей: {migrated_count}")
def downgrade() -> None:
    """Revert the migration: delete every row from the ``roles`` table.

    The files snapshotted into ``alembic/roles/`` are deliberately left
    in place so the data can be re-migrated later.
    """
    op.get_bind().execute(sa.text("DELETE FROM roles"))
    print("⚠️ Роли удалены из БД. Файлы в alembic/roles/ остаются для безопасности.")
Reference in New Issue
Block a user