Sergey Antropoff 7877590440 feat: Initial infrastructure setup for SensusAgent and SensusWorker
- Add Kafka with SSL support and KafkaUI with authentication
- Add PostgreSQL with initialization scripts
- Add ClickHouse cluster with 2 replicas and load balancer
- Add Nginx load balancer for ClickHouse
- Add comprehensive Makefile for infrastructure management
- Add SSL certificate generation for Kafka
- Add complete documentation and quickstart guide
- Add environment configuration with real values

Author: Сергей Антропов
Site: https://devops.org.ru
2025-09-10 11:20:47 +03:00

<!-- Author: Сергей Антропов, site: https://devops.org.ru -->
<!-- Purpose: metrics table configuration for ClickHouse -->
<!-- Server-level settings for the tables that store metrics from SensusAgent -->
<yandex>
<!-- Pattern for external dictionary configuration files -->
<dictionaries_config>*_dictionary.xml</dictionaries_config>
<!-- Path for format schemas (Protobuf, Cap'n Proto) used when parsing metric payloads -->
<format_schema_path>/var/lib/clickhouse/format_schemas/</format_schema_path>
<!-- Data compression settings -->
<compression>
<case>
<min_part_size>10000000000</min_part_size>
<min_part_size_ratio>0.01</min_part_size_ratio>
<method>lz4</method>
</case>
</compression>
<!-- Part log: history of operations on data parts (merges, inserts, removals) -->
<part_log>
<database>system</database>
<table>part_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</part_log>
<!-- Limits for large queries -->
<max_query_size>268435456</max_query_size>
<max_ast_depth>1000</max_ast_depth>
<max_ast_elements>50000</max_ast_elements>
<max_expanded_ast_elements>500000</max_expanded_ast_elements>
<!-- Paths for temporary data and user files -->
<tmp_path>/var/lib/clickhouse/tmp/</tmp_path>
<user_files_path>/var/lib/clickhouse/user_files/</user_files_path>
<!-- Distributed DDL queue used by ON CLUSTER queries across the replicated setup -->
<distributed_ddl>
<path>/clickhouse/task_queue/ddl</path>
</distributed_ddl>
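<!--
The distributed_ddl queue above is what lets ON CLUSTER DDL run across the two
ClickHouse replicas mentioned in the commit message. A minimal sketch of a
matching remote_servers definition is shown below for reference only; the
cluster name sensus_cluster and the hostnames clickhouse-1 and clickhouse-2
are assumptions and would normally live in a separate cluster config file.

<remote_servers>
  <sensus_cluster>
    <shard>
      <internal_replication>true</internal_replication>
      <replica>
        <host>clickhouse-1</host>
        <port>9000</port>
      </replica>
      <replica>
        <host>clickhouse-2</host>
        <port>9000</port>
      </replica>
    </shard>
  </sensus_cluster>
</remote_servers>
-->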
<!-- System log tables: query history, traces and internal metrics (part_log is configured above) -->
<query_log>
<database>system</database>
<table>query_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</query_log>
<query_thread_log>
<database>system</database>
<table>query_thread_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</query_thread_log>
<trace_log>
<database>system</database>
<table>trace_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</trace_log>
<metric_log>
<database>system</database>
<table>metric_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</metric_log>
<asynchronous_metric_log>
<database>system</database>
<table>asynchronous_metric_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</asynchronous_metric_log>
<session_log>
<database>system</database>
<table>session_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</session_log>
<text_log>
<database>system</database>
<table>text_log</table>
<partition_by>toYYYYMM(event_date)</partition_by>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
</text_log>
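<!--
With the system log tables above enabled, the flushed query history can be
inspected directly from SQL; a small illustrative query against the standard
system.query_log schema:

SELECT event_time, query_duration_ms, query
FROM system.query_log
WHERE type = 'QueryFinish'
ORDER BY event_time DESC
LIMIT 10;
-->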
<!-- Allowed external data source table functions -->
<external_table_functions_whitelist>
<function>url</function>
<function>file</function>
<function>mysql</function>
<function>postgresql</function>
<function>odbc</function>
<function>jdbc</function>
<function>hdfs</function>
<function>s3</function>
</external_table_functions_whitelist>
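<!--
The list above enumerates the table functions allowed as external sources.
For illustration, the url table function can read a remote file directly; the
URL and column schema below are purely hypothetical:

SELECT count()
FROM url('https://example.org/metrics.csv', 'CSVWithNames', 'name String, value Float64');
-->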
<!-- Kafka integration settings (global defaults for Kafka engine consumers) -->
<kafka>
<broker_list>kafka:29092</broker_list>
<security_protocol>plaintext</security_protocol>
<topic_list>
<topic>sensus.metrics</topic>
</topic_list>
<consumer_group>clickhouse-consumer</consumer_group>
<num_consumers>1</num_consumers>
<max_block_size>1048576</max_block_size>
<max_poll_interval_ms>30000</max_poll_interval_ms>
<session_timeout_ms>30000</session_timeout_ms>
<auto_offset_reset>latest</auto_offset_reset>
<skip_broken_messages>100</skip_broken_messages>
</kafka>
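<!--
The Kafka section above only supplies connection defaults; the actual
ingestion path in ClickHouse is a table with the Kafka engine plus a
materialized view that writes into a MergeTree table. A minimal sketch is
shown below, assuming a hypothetical sensus database, metrics_queue/metrics
table layout and JSONEachRow payloads; only the broker kafka:29092, the topic
sensus.metrics and the consumer group clickhouse-consumer come from this
config.

CREATE TABLE sensus.metrics_queue
(
    host String,
    name String,
    value Float64,
    ts DateTime
)
ENGINE = Kafka
SETTINGS kafka_broker_list = 'kafka:29092',
         kafka_topic_list = 'sensus.metrics',
         kafka_group_name = 'clickhouse-consumer',
         kafka_format = 'JSONEachRow',
         kafka_num_consumers = 1;

CREATE TABLE sensus.metrics
(
    host String,
    name String,
    value Float64,
    ts DateTime
)
ENGINE = MergeTree
PARTITION BY toYYYYMM(ts)
ORDER BY (host, name, ts);

CREATE MATERIALIZED VIEW sensus.metrics_mv TO sensus.metrics AS
SELECT host, name, value, ts FROM sensus.metrics_queue;
-->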
</yandex>