1 change: 1 addition & 0 deletions misc/python/materialize/mzcompose/__init__.py
@@ -107,6 +107,7 @@ def get_minimal_system_parameters(
"enable_rbac_checks": "true",
"enable_reduce_mfp_fusion": "true",
"enable_refresh_every_mvs": "true",
"enable_repr_typecheck": "false",
"enable_cluster_schedule_refresh": "true",
"enable_sql_server_source": "true",
"enable_statement_lifecycle_logging": "true",
8 changes: 8 additions & 0 deletions src/adapter/src/optimize/copy_to.rs
@@ -28,6 +28,9 @@ use mz_storage_types::connections::Connection;
use mz_storage_types::sinks::S3UploadInfo;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::normalize_lets::normalize_lets;
use mz_transform::reprtypecheck::{
SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};
use mz_transform::{StatisticsOracle, TransformCtx};
use timely::progress::Antichain;
@@ -48,6 +51,8 @@ use crate::optimize::{
pub struct Optimizer {
/// A typechecking context to use throughout the optimizer pipeline.
typecheck_ctx: TypecheckContext,
/// A representation typechecking context to use throughout the optimizer pipeline.
repr_typecheck_ctx: ReprTypecheckContext,
/// A snapshot of the catalog state.
catalog: Arc<Catalog>,
/// A snapshot of the cluster that will run the dataflows.
@@ -75,6 +80,7 @@ impl Optimizer {
) -> Self {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
catalog,
compute_instance,
select_id,
@@ -167,6 +173,7 @@ impl Optimize<HirRelationExpr> for Optimizer {
let mut transform_ctx = TransformCtx::local(
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
Some(self.select_id),
@@ -344,6 +351,7 @@ impl<'s> Optimize<LocalMirPlan<Resolved<'s>>> for Optimizer {
&*stats,
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
);
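The hunks above are the first instance of a wiring pattern that repeats in `index.rs`, `materialized_view.rs`, `peek.rs`, `subscribe.rs`, and `view.rs` below: each `Optimizer` gains a `repr_typecheck_ctx` field, seeds it with `empty_repr_context()` in its constructor, and passes it by reference into the `TransformCtx` constructors. A condensed sketch of just that pattern; the struct name and the omission of all other optimizer fields are illustrative, not part of the change:

```rust
use mz_transform::reprtypecheck::{
    SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};

/// Sketch: the pair of shared contexts every optimizer now carries.
struct OptimizerContexts {
    /// SQL-level typechecking context (pre-existing).
    typecheck_ctx: TypecheckContext,
    /// Representation-level typechecking context (new in this change).
    repr_typecheck_ctx: ReprTypecheckContext,
}

impl OptimizerContexts {
    /// Both contexts start out empty; they are later handed to
    /// `TransformCtx::local` (or the global constructor) together with
    /// `&mut DataflowMetainfo`, as the hunks above and below show.
    fn new() -> Self {
        Self {
            typecheck_ctx: empty_context(),
            repr_typecheck_ctx: empty_repr_context(),
        }
    }
}
```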
7 changes: 7 additions & 0 deletions src/adapter/src/optimize/index.rs
@@ -38,6 +38,9 @@ use mz_transform::TransformCtx;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::normalize_lets::normalize_lets;
use mz_transform::notice::{IndexAlreadyExists, IndexKeyEmpty};
use mz_transform::reprtypecheck::{
SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};

use crate::optimize::dataflows::{
@@ -51,6 +54,8 @@ use crate::optimize::{
pub struct Optimizer {
/// A typechecking context to use throughout the optimizer pipeline.
typecheck_ctx: TypecheckContext,
/// A representation typechecking context to use throughout the optimizer pipeline.
repr_typecheck_ctx: ReprTypecheckContext,
/// A snapshot of the catalog state.
catalog: Arc<dyn OptimizerCatalog>,
/// A snapshot of the cluster that will run the dataflows.
@@ -75,6 +80,7 @@ impl Optimizer {
) -> Self {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
catalog,
compute_instance,
exported_index_id,
@@ -177,6 +183,7 @@ impl Optimize<Index> for Optimizer {
&mz_transform::EmptyStatisticsOracle, // TODO: wire proper stats
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
);
8 changes: 8 additions & 0 deletions src/adapter/src/optimize/materialized_view.rs
@@ -42,6 +42,9 @@ use mz_sql::plan::HirRelationExpr;
use mz_transform::TransformCtx;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::normalize_lets::normalize_lets;
use mz_transform::reprtypecheck::{
SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};
use timely::progress::Antichain;

@@ -56,6 +59,8 @@ use crate::optimize::{
pub struct Optimizer {
/// A typechecking context to use throughout the optimizer pipeline.
typecheck_ctx: TypecheckContext,
/// A representation typechecking context to use throughout the optimizer pipeline.
repr_typecheck_ctx: ReprTypecheckContext,
/// A snapshot of the catalog state.
catalog: Arc<dyn OptimizerCatalog>,
/// A snapshot of the cluster that will run the dataflows.
@@ -115,6 +120,7 @@ impl Optimizer {
) -> Self {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
catalog,
compute_instance,
sink_id,
@@ -197,6 +203,7 @@ impl Optimize<HirRelationExpr> for Optimizer {
let mut transform_ctx = TransformCtx::local(
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
Some(self.view_id),
@@ -286,6 +293,7 @@ impl Optimize<LocalMirPlan> for Optimizer {
&mz_transform::EmptyStatisticsOracle, // TODO: wire proper stats
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
);
8 changes: 8 additions & 0 deletions src/adapter/src/optimize/peek.rs
@@ -25,6 +25,9 @@ use mz_sql::plan::HirRelationExpr;
use mz_sql::session::metadata::SessionMetadata;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::normalize_lets::normalize_lets;
use mz_transform::reprtypecheck::{
SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};
use mz_transform::{StatisticsOracle, TransformCtx};
use timely::progress::Antichain;
@@ -45,6 +48,8 @@ use crate::optimize::{
pub struct Optimizer {
/// A typechecking context to use throughout the optimizer pipeline.
typecheck_ctx: TypecheckContext,
/// A representation typechecking context to use throughout the optimizer pipeline.
repr_typecheck_ctx: ReprTypecheckContext,
/// A snapshot of the catalog state.
catalog: Arc<Catalog>,
/// A snapshot of the cluster that will run the dataflows.
@@ -75,6 +80,7 @@ impl Optimizer {
) -> Self {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
catalog,
compute_instance,
finishing,
@@ -181,6 +187,7 @@ impl Optimize<HirRelationExpr> for Optimizer {
let mut transform_ctx = TransformCtx::local(
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
Some(self.select_id),
@@ -336,6 +343,7 @@ impl<'s> Optimize<LocalMirPlan<Resolved<'s>>> for Optimizer {
&*stats,
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
);
8 changes: 8 additions & 0 deletions src/adapter/src/optimize/subscribe.rs
@@ -26,6 +26,9 @@ use mz_sql::plan::SubscribeFrom;
use mz_transform::TransformCtx;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::normalize_lets::normalize_lets;
use mz_transform::reprtypecheck::{
SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};
use timely::progress::Antichain;

@@ -42,6 +45,8 @@ use crate::optimize::{
pub struct Optimizer {
/// A typechecking context to use throughout the optimizer pipeline.
typecheck_ctx: TypecheckContext,
/// A representation typechecking context to use throughout the optimizer pipeline.
repr_typecheck_ctx: ReprTypecheckContext,
/// A snapshot of the catalog state.
catalog: Arc<dyn OptimizerCatalog>,
/// A snapshot of the cluster that will run the dataflows.
@@ -95,6 +100,7 @@ impl Optimizer {
) -> Self {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
catalog,
compute_instance,
view_id,
@@ -229,6 +235,7 @@ impl Optimize<SubscribeFrom> for Optimizer {
let mut transform_ctx = TransformCtx::local(
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
Some(self.view_id),
@@ -272,6 +279,7 @@ impl Optimize<SubscribeFrom> for Optimizer {
&mz_transform::EmptyStatisticsOracle, // TODO: wire proper stats
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
Some(&self.metrics),
);
8 changes: 8 additions & 0 deletions src/adapter/src/optimize/view.rs
@@ -26,6 +26,9 @@ use mz_sql::optimizer_metrics::OptimizerMetrics;
use mz_sql::plan::HirRelationExpr;
use mz_transform::TransformCtx;
use mz_transform::dataflow::DataflowMetainfo;
use mz_transform::reprtypecheck::{
SharedContext as ReprTypecheckContext, empty_context as empty_repr_context,
};
use mz_transform::typecheck::{SharedContext as TypecheckContext, empty_context};

use crate::optimize::dataflows::{ExprPrepStyle, prep_relation_expr};
@@ -37,6 +40,8 @@ use crate::optimize::{
pub struct Optimizer<'a> {
/// A typechecking context to use throughout the optimizer pipeline.
typecheck_ctx: TypecheckContext,
/// A representation typechecking context to use throughout the optimizer pipeline.
repr_typecheck_ctx: ReprTypecheckContext,
/// Optimizer config.
config: OptimizerConfig,
/// Optimizer metrics.
@@ -54,6 +59,7 @@ impl<'a> Optimizer<'a> {
pub fn new(config: OptimizerConfig, metrics: Option<OptimizerMetrics>) -> Self {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
config,
metrics,
expr_prep_style: None,
@@ -71,6 +77,7 @@ impl<'a> Optimizer<'a> {
) -> Optimizer<'a> {
Self {
typecheck_ctx: empty_context(),
repr_typecheck_ctx: empty_repr_context(),
config,
metrics,
expr_prep_style: Some(expr_prep_style),
@@ -95,6 +102,7 @@ impl Optimize<HirRelationExpr> for Optimizer<'_> {
let mut transform_ctx = TransformCtx::local(
&self.config.features,
&self.typecheck_ctx,
&self.repr_typecheck_ctx,
&mut df_meta,
self.metrics.as_ref(),
None,
9 changes: 6 additions & 3 deletions src/expr/src/scalar/func/variadic.rs
@@ -29,7 +29,7 @@ use mz_repr::adt::range::{InvalidRangeError, Range, RangeBound, parse_range_boun
use mz_repr::adt::system::Oid;
use mz_repr::adt::timestamp::CheckedTimestamp;
use mz_repr::role_id::RoleId;
-use mz_repr::{ColumnName, Datum, Row, RowArena, SqlColumnType, SqlScalarType};
+use mz_repr::{ColumnName, Datum, ReprScalarType, Row, RowArena, SqlColumnType, SqlScalarType};
use serde::{Deserialize, Serialize};
use sha1::Sha1;
use sha2::{Sha224, Sha256, Sha384, Sha512};
@@ -1321,8 +1321,11 @@ impl VariadicFunc {
.nullable(true),
ArrayCreate { elem_type } => {
debug_assert!(
-input_types.iter().all(|t| t.scalar_type.base_eq(elem_type)),
-"Args to ArrayCreate should have types that are compatible with the elem_type"
+input_types
+    .iter()
+    .all(|t| ReprScalarType::from(&t.scalar_type)
+        == ReprScalarType::from(elem_type)),
+"Args to ArrayCreate should have types that are repr-compatible with the elem_type"
);
match elem_type {
SqlScalarType::Array(_) => elem_type.clone().nullable(false),
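The `ArrayCreate` debug assertion above now compares each argument type against `elem_type` after converting both to `ReprScalarType`, rather than via `base_eq`. A small helper showing the predicate it applies; the function name is mine, and the reading that `ReprScalarType::from` erases SQL-level attributes that do not affect the physical representation is an interpretation of the change, not stated in the diff:

```rust
use mz_repr::{ReprScalarType, SqlScalarType};

/// Illustrative only: two SQL types are "repr-compatible" when they erase to
/// the same representation type, which is what the relaxed assertion checks.
fn repr_compatible(a: &SqlScalarType, b: &SqlScalarType) -> bool {
    ReprScalarType::from(a) == ReprScalarType::from(b)
}
```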
14 changes: 13 additions & 1 deletion src/repr/src/explain.rs
@@ -45,7 +45,7 @@ use crate::explain::dot::{DisplayDot, dot_string};
use crate::explain::json::{DisplayJson, json_string};
use crate::explain::text::{DisplayText, text_string};
use crate::optimize::OptimizerFeatureOverrides;
-use crate::{GlobalId, SqlColumnType, SqlScalarType};
+use crate::{GlobalId, ReprColumnType, ReprScalarType, SqlColumnType, SqlScalarType};

pub mod dot;
pub mod json;
@@ -444,6 +444,12 @@ pub trait ExprHumanizer: fmt::Debug {
/// compatibility is more important.
fn humanize_scalar_type(&self, ty: &SqlScalarType, postgres_compat: bool) -> String;

/// Returns a human-readable name for the specified scalar type.
/// Calls `humanize_scalar_type` with the `SqlScalarType` representation of the specified type.
fn humanize_scalar_type_repr(&self, typ: &ReprScalarType, postgres_compat: bool) -> String {
self.humanize_scalar_type(&SqlScalarType::from_repr(typ), postgres_compat)
}

/// Returns a human-readable name for the specified column type.
/// Used in, e.g., EXPLAIN and error msgs, in which case exact Postgres compatibility is less
/// important than showing as much detail as possible. Also used in `pg_typeof`, where Postgres
@@ -456,6 +462,12 @@
)
}

/// Returns a human-readable name for the specified column type.
/// Calls `humanize_column_type` with the `SqlColumnType` representation of the specified type.
fn humanize_column_type_repr(&self, typ: &ReprColumnType, postgres_compat: bool) -> String {
self.humanize_column_type(&SqlColumnType::from_repr(typ), postgres_compat)
}

/// Returns a vector of column names for the relation identified by `id`.
fn column_names_for_id(&self, id: GlobalId) -> Option<Vec<String>>;

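The two new `*_repr` methods above are default trait methods that bridge repr-level types back to the existing SQL-level humanizers via `SqlScalarType::from_repr` / `SqlColumnType::from_repr`. A usage sketch, assuming the trait is reachable at `mz_repr::explain::ExprHumanizer` and that a humanizer value is already at hand:

```rust
use mz_repr::ReprColumnType;
use mz_repr::explain::ExprHumanizer;

/// Illustrative only: render a repr-level column type for an error message.
fn describe_column<H: ExprHumanizer>(humanizer: &H, ty: &ReprColumnType) -> String {
    // `postgres_compat = false`: prefer detail over exact Postgres naming,
    // as EXPLAIN output and error messages do.
    humanizer.humanize_column_type_repr(ty, false)
}
```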
8 changes: 4 additions & 4 deletions src/repr/src/lib.rs
@@ -54,8 +54,8 @@ pub use crate::global_id::GlobalId;
pub use crate::relation::{
ColumnIndex, ColumnName, NotNullViolation, PropRelationDescDiff, ProtoColumnName,
ProtoColumnType, ProtoRelationDesc, ProtoRelationType, RelationDesc, RelationDescBuilder,
-RelationVersion, RelationVersionSelector, ReprColumnType, SqlColumnType, SqlRelationType,
-UNKNOWN_COLUMN_NAME, VersionedRelationDesc, arb_relation_desc_diff,
+RelationVersion, RelationVersionSelector, ReprColumnType, ReprRelationType, SqlColumnType,
+SqlRelationType, UNKNOWN_COLUMN_NAME, VersionedRelationDesc, arb_relation_desc_diff,
arb_relation_desc_projection, arb_row_for_relation,
};
pub use crate::row::encode::{RowColumnarDecoder, RowColumnarEncoder, preserves_order};
@@ -66,7 +66,7 @@ pub use crate::row::{
};
pub use crate::scalar::{
ArrayRustType, AsColumnType, Datum, DatumType, PropArray, PropDatum, PropDict, PropList,
-ProtoScalarType, ReprScalarType, ScalarBaseType, SqlScalarType, arb_datum,
-arb_datum_for_column, arb_datum_for_scalar, arb_range_type,
+ProtoScalarType, ReprScalarBaseType, ReprScalarType, SqlScalarBaseType, SqlScalarType,
+arb_datum, arb_datum_for_column, arb_datum_for_scalar, arb_range_type,
};
pub use crate::timestamp::{Timestamp, TimestampManipulation};
1 change: 1 addition & 0 deletions src/repr/src/optimize.rs
@@ -131,6 +131,7 @@ optimizer_feature_flags!({
// See the feature flag of the same name.
enable_dequadratic_eqprop_map: bool,
enable_fast_path_plan_insights: bool,
enable_repr_typecheck: bool,
});

/// A trait used to implement layered config construction.
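The `optimizer_feature_flags!` entry above adds a matching `enable_repr_typecheck` optimizer feature, and the mzcompose change at the top keeps it disabled in the minimal system-parameter set. A hypothetical gate showing how a pass might consult it; the `OptimizerFeatures` type name and the direct field access are assumptions based on the macro invocation, not something shown in this diff:

```rust
use mz_repr::optimize::OptimizerFeatures; // assumed: struct populated by `optimizer_feature_flags!`

/// Hypothetical: whether the representation typecheck pass should run.
fn should_repr_typecheck(features: &OptimizerFeatures) -> bool {
    features.enable_repr_typecheck
}
```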