1 change: 0 additions & 1 deletion .github/workflows/ci.yml
@@ -386,7 +386,6 @@ jobs:
matrix:
version:
[
"24.8.11.51285.altinitystable",
"25.3.6.10034.altinitystable",
]

12 changes: 6 additions & 6 deletions docker-compose.gcb.yml
@@ -70,7 +70,7 @@ services:
KAFKA_LOG4J_ROOT_LOGLEVEL: "WARN"
KAFKA_TOOLS_LOG4J_LOGLEVEL: "WARN"
clickhouse:
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable}"
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:25.3.6.10034.altinitystable}"
hostname: clickhouse.local
extra_hosts:
- "clickhouse.local:127.0.0.1" # Add entry to /etc/hosts file
@@ -92,7 +92,7 @@ services:
clickhouse-query:
depends_on:
- zookeeper
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable}"
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:25.3.6.10034.altinitystable}"
hostname: clickhouse-query.local
extra_hosts:
- "clickhouse-query.local:127.0.0.1" # Add entry to /etc/hosts file
@@ -108,7 +108,7 @@ services:
clickhouse-01:
depends_on:
- zookeeper
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable}"
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:25.3.6.10034.altinitystable}"
hostname: clickhouse-01.local
extra_hosts:
- "clickhouse-01.local:127.0.0.1" # Add entry to /etc/hosts file
@@ -125,7 +125,7 @@ services:
clickhouse-02:
depends_on:
- zookeeper
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable}"
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:25.3.6.10034.altinitystable}"
hostname: clickhouse-02.local
extra_hosts:
- "clickhouse-02.local:127.0.0.1" # Add entry to /etc/hosts file
@@ -142,7 +142,7 @@ services:
clickhouse-03:
depends_on:
- zookeeper
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable}"
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:25.3.6.10034.altinitystable}"
hostname: clickhouse-03.local
extra_hosts:
- "clickhouse-03.local:127.0.0.1" # Add entry to /etc/hosts file
@@ -159,7 +159,7 @@ services:
clickhouse-04:
depends_on:
- zookeeper
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable}"
image: "${CLICKHOUSE_IMAGE:-ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:25.3.6.10034.altinitystable}"
hostname: clickhouse-04.local
extra_hosts:
- "clickhouse-04.local:127.0.0.1" # Add entry to /etc/hosts file
@@ -0,0 +1,101 @@
from snuba.clickhouse.columns import JSON, Column
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations
from snuba.migrations.columns import MigrationModifiers as Modifiers
from snuba.migrations.operations import OperationTarget
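
# Adds a JSON "attributes_array" column (up to 128 dynamic paths, ZSTD(1) codec)
# after "attributes_float_39" to the eap_items_1 local and distributed tables,
# and to each of their downsampled variants (sampling weights 8, 64, and 512).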

storage_set = StorageSetKey.EVENTS_ANALYTICS_PLATFORM
table_name_prefix = "eap_items_1"
new_columns = [
Column(
"attributes_array",
JSON(
max_dynamic_paths=128,
modifiers=Modifiers(
codecs=[
"ZSTD(1)",
],
),
),
),
]
after = "attributes_float_39"
sampling_weights = [8, 8**2, 8**3]


class Migration(migration.ClickhouseNodeMigration):
blocking = False

def forwards_ops(self) -> list[operations.SqlOperation]:
ops: list[operations.SqlOperation] = [
operations.AddColumn(
storage_set=storage_set,
table_name=f"{table_name_prefix}_{suffix}",
column=new_column,
after=after,
target=target,
)
for suffix, target in [
("local", OperationTarget.LOCAL),
("dist", OperationTarget.DISTRIBUTED),
]
for new_column in new_columns
]

for sampling_weight in sampling_weights:
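            # Add the same column to this downsampled tier's local and dist tables.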
downsampled_table_prefix = f"eap_items_1_downsample_{sampling_weight}"

ops.extend(
[
operations.AddColumn(
storage_set=storage_set,
table_name=f"{downsampled_table_prefix}_{suffix}",
column=new_column,
after=after,
target=target,
)
for suffix, target in [
("local", OperationTarget.LOCAL),
("dist", OperationTarget.DISTRIBUTED),
]
for new_column in new_columns
]
)

return ops

def backwards_ops(self) -> list[operations.SqlOperation]:
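        # Reverse of forwards_ops: drop from the distributed tables first, then local.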
ops: list[operations.SqlOperation] = [
operations.DropColumn(
storage_set=storage_set,
table_name=f"{table_name_prefix}_{suffix}",
column_name=new_column.name,
target=target,
)
for suffix, target in [
("dist", OperationTarget.DISTRIBUTED),
("local", OperationTarget.LOCAL),
]
for new_column in new_columns
]

for sampling_weight in sampling_weights:
downsampled_table_prefix = f"eap_items_1_downsample_{sampling_weight}"

ops.extend(
[
operations.DropColumn(
storage_set=storage_set,
table_name=f"{downsampled_table_prefix}_{suffix}",
column_name=new_column.name,
target=target,
)
for suffix, target in [
("dist", OperationTarget.DISTRIBUTED),
("local", OperationTarget.LOCAL),
]
for new_column in new_columns
]
)

return ops
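
For review, a minimal sketch of how the generated DDL could be inspected locally; this assumes the returned operations expose format_sql(), as other operations in snuba.migrations do:

migration = Migration()
for op in migration.forwards_ops():
    print(op.format_sql())
for op in migration.backwards_ops():
    print(op.format_sql())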