Skip to content

Commit b1e9744

Browse files
Merge pull request #303 from Sanketika-Obsrv/data-schema-fix
fix: #OBS-I406 supported storage types are now read from environment variables
2 parents 655bb5f + d107ebd commit b1e9744

File tree

7 files changed

+25
-6
lines changed

7 files changed

+25
-6
lines changed

api-service/src/configs/Config.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,5 +118,6 @@ export const config = {
118118
"otel": {
119119
"enable": process.env.otel_enable || "false",
120120
"collector_endpoint": process.env.otel_collector_endpoint || "http://localhost:4318"
121-
}
121+
},
122+
"storage_types": process.env.storage_types || 'druid,datalake'
122123
}

api-service/src/controllers/DatasetCopy/DatasetCopy.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import { ResponseHandler } from "../../helpers/ResponseHandler";
44
import * as _ from "lodash";
55
import { schemaValidation } from "../../services/ValidationService";
66
import validationSchema from "./RequestValidationSchema.json";
7-
import { datasetService, getLiveDatasetConfigs } from "../../services/DatasetService";
7+
import { datasetService, getLiveDatasetConfigs, validateStorageSupport } from "../../services/DatasetService";
88
import { updateRecords } from "./DatasetCopyHelper";
99
import { obsrvError } from "../../types/ObsrvError";
1010

@@ -40,6 +40,7 @@ const datasetCopy = async (req: Request, res: Response) => {
4040
validateRequest(req);
4141
const newDatasetId = _.get(req, "body.request.destination.datasetId");
4242
const dataset = await fetchDataset(req);
43+
validateStorageSupport(dataset);
4344
const userID = (req as any)?.userID;
4445
_.set(dataset, "created_by", userID);
4546
_.set(dataset, "updated_by", userID);

api-service/src/controllers/DatasetCreate/DatasetCreate.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import _ from "lodash";
22
import { Request, Response } from "express";
33
import httpStatus from "http-status";
4-
import { datasetService } from "../../services/DatasetService";
4+
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
55
import DatasetCreate from "./DatasetCreateValidationSchema.json";
66
import { schemaValidation } from "../../services/ValidationService";
77
import { ResponseHandler } from "../../helpers/ResponseHandler";
@@ -28,6 +28,7 @@ const validateRequest = async (req: Request) => {
2828
throw obsrvError(datasetId, "DATASET_DUPLICATE_DENORM_KEY", "Duplicate denorm output fields found.", "BAD_REQUEST", 400, undefined, {duplicateKeys: duplicateDenormKeys})
2929
}
3030

31+
validateStorageSupport(_.get(req, ["body", "request"]))
3132
}
3233

3334
const datasetCreate = async (req: Request, res: Response) => {

api-service/src/controllers/DatasetImport/DatasetImport.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import { Request, Response } from "express";
22
import { ResponseHandler } from "../../helpers/ResponseHandler";
33
import httpStatus from "http-status";
44
import _ from "lodash";
5-
import { datasetService } from "../../services/DatasetService";
5+
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
66
import { datasetImportValidation, migrateExportedDatasetV1 } from "./DatasetImportHelper";
77
import { obsrvError } from "../../types/ObsrvError";
88

@@ -21,6 +21,7 @@ const datasetImport = async (req: Request, res: Response) => {
2121
const { updatedDataset, ignoredFields } = await datasetImportValidation({ ...requestBody, "request": datasetPayload })
2222
const { successMsg, partialIgnored } = getResponseData(ignoredFields)
2323

24+
validateStorageSupport(updatedDataset);
2425
const dataset = await importDataset(updatedDataset, overwrite, userID);
2526
ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { message: successMsg, data: dataset, ...(!_.isEmpty(partialIgnored) && { ignoredFields: partialIgnored }) } });
2627
}

api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import { Request, Response } from "express";
22
import _ from "lodash";
33
import { ResponseHandler } from "../../helpers/ResponseHandler";
4-
import { datasetService } from "../../services/DatasetService";
4+
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
55
import { schemaValidation } from "../../services/ValidationService";
66
import StatusTransitionSchema from "./RequestValidationSchema.json";
77
import ReadyToPublishSchema from "./ReadyToPublishSchema.json"
@@ -88,6 +88,7 @@ const deleteDataset = async (dataset: Record<string, any>) => {
8888
const readyForPublish = async (dataset: Record<string, any>, updated_by: any) => {
8989

9090
const draftDataset: any = await datasetService.getDraftDataset(dataset.dataset_id)
91+
validateStorageSupport(draftDataset);
9192
let defaultConfigs: any = _.cloneDeep(defaultDatasetConfig)
9293
defaultConfigs = _.omit(defaultConfigs, ["router_config"])
9394
defaultConfigs = _.omit(defaultConfigs, "dedup_config.dedup_key");
@@ -136,6 +137,7 @@ const readyForPublish = async (dataset: Record<string, any>, updated_by: any) =>
136137
const publishDataset = async (dataset: Record<string, any>, userID: any) => {
137138

138139
const draftDataset: Record<string, any> = await datasetService.getDraftDataset(dataset.dataset_id) as unknown as Record<string, any>
140+
validateStorageSupport(draftDataset);
139141
_.set(draftDataset, ["created_by"], userID);
140142
_.set(draftDataset, ["updated_by"], userID);
141143
await validateAndUpdateDenormConfig(draftDataset);

api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ import Model from "sequelize/types/model";
55
import { DatasetStatus } from "../../types/DatasetModels";
66
import { ResponseHandler } from "../../helpers/ResponseHandler";
77
import { cipherService } from "../../services/CipherService";
8-
import { datasetService } from "../../services/DatasetService";
8+
import { datasetService, validateStorageSupport } from "../../services/DatasetService";
99
import { schemaValidation } from "../../services/ValidationService";
1010
import DatasetUpdate from "./DatasetUpdateValidationSchema.json";
1111
import { obsrvError } from "../../types/ObsrvError";
@@ -30,6 +30,7 @@ const validateRequest = async (req: Request) => {
3030
throw obsrvError(datasetId, "DATASET_UPDATE_NO_FIELDS", "Provide atleast one field in addition to the dataset_id to update the dataset", "BAD_REQUEST", 400)
3131
}
3232

33+
validateStorageSupport(_.get(req, ["body", "request"]))
3334
}
3435

3536
const validateDataset = (dataset: Record<string, any> | null, req: Request) => {

api-service/src/services/DatasetService.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ import { obsrvError } from "../types/ObsrvError";
1818
import { druidHttpService } from "../connections/druidConnection";
1919
import { tableGenerator } from "./TableGenerator";
2020
import { deleteAlertByDataset, deleteMetricAliasByDataset } from "./managers";
21+
import { config } from "../configs/Config";
2122

2223
class DatasetService {
2324

@@ -411,4 +412,15 @@ export const getV1Connectors = async (datasetId: string) => {
411412
return modifiedV1Connectors;
412413
}
413414

415+
const storageTypes = _.split(config.storage_types, ",")
416+
export const validateStorageSupport = (dataset: Record<string, any>) => {
417+
const { olap_store_enabled, lakehouse_enabled } = _.get(dataset, ["dataset_config", "indexing_config"]) || {}
418+
if (olap_store_enabled && !_.includes(storageTypes, "druid")) {
419+
throw obsrvError("", "DATASET_UNSUPPORTED_STORAGE_TYPE", `The storage type "olap_store" is not available. Please use one of the available storage types: ${storageTypes}`, "BAD_REQUEST", 400)
420+
}
421+
if (lakehouse_enabled && !_.includes(storageTypes, "datalake")) {
422+
throw obsrvError("", "DATASET_UNSUPPORTED_STORAGE_TYPE", `The storage type "datalake" is not available. Please use one of the available storage types: ${storageTypes}`, "BAD_REQUEST", 400)
423+
}
424+
}
425+
414426
export const datasetService = new DatasetService();

0 commit comments

Comments
 (0)