Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion ingestion/tests/unit/test_workflow_parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -404,7 +404,12 @@ def test_parsing_matillion_pipeline(self):
parse_workflow_config_gracefully(config_dict)

self.assertIn(
"We encountered an error parsing the configuration of your MatillionConnection.\nYou might need to review your config based on the original cause of this failure:\n\t - Missing parameter in ('connection', 'hostPort')\n\t - Missing parameter in ('connection', 'username')\n\t - Missing parameter in ('connection', 'password')",
"We encountered an error parsing the configuration of your MatillionConnection.\n"
"You might need to review your config based on the original cause of this failure:\n"
"\t - Missing parameter in ('connection', 'function-after[parse_name(), MatillionEtlAuthConfig]', 'hostPort')\n"
"\t - Missing parameter in ('connection', 'function-after[parse_name(), MatillionEtlAuthConfig]', 'username')\n"
"\t - Missing parameter in ('connection', 'function-after[parse_name(), MatillionEtlAuthConfig]', 'password')\n"
"\t - Invalid parameter value for ('connection', 'function-after[parse_name(), MatillionDpcAuthConfig]', 'type')",
Comment on lines 406 to +412
Copy link

Copilot AI Mar 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This assertion depends on Pydantic's internal error loc formatting (e.g., function-after[parse_name(), ...]) and on the exact ordering of validation errors, which is brittle across Pydantic/model changes. Prefer asserting on smaller, stable substrings (e.g., that the message mentions MatillionConnection and that hostPort/username/password are missing) rather than the full, fully-qualified loc tuples.

Copilot uses AI. Check for mistakes.
str(err.exception),
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import java.util.List;
import org.openmetadata.schema.services.connections.pipeline.MatillionConnection;
import org.openmetadata.schema.services.connections.pipeline.matillion.MatillionDPCAuth;
import org.openmetadata.schema.services.connections.pipeline.matillion.MatillionETLAuth;
import org.openmetadata.schema.utils.JsonUtils;

Expand All @@ -30,7 +31,9 @@ public Object convert(Object object) {
MatillionConnection matillionConnection =
(MatillionConnection) JsonUtils.convertValue(object, this.clazz);

tryToConvertOrFail(matillionConnection.getConnection(), List.of(MatillionETLAuth.class))
tryToConvertOrFail(
matillionConnection.getConnection(),
List.of(MatillionETLAuth.class, MatillionDPCAuth.class))
Comment on lines +35 to +36
Copy link

Copilot AI Mar 26, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The indentation in this multi-line tryToConvertOrFail(...) call doesn’t match the project’s standard Java formatting (and is likely to be rewritten by Spotless / google-java-format). Please reformat this block (e.g., by running mvn spotless:apply) so the continuation indentation is consistent and CI formatting checks don’t fail.

Suggested change
matillionConnection.getConnection(),
List.of(MatillionETLAuth.class, MatillionDPCAuth.class))
matillionConnection.getConnection(),
List.of(MatillionETLAuth.class, MatillionDPCAuth.class))

Copilot uses AI. Check for mistakes.
.ifPresent(matillionConnection::setConnection);

return matillionConnection;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import org.openmetadata.schema.services.connections.database.TrinoConnection;
import org.openmetadata.schema.services.connections.database.datalake.GCSConfig;
import org.openmetadata.schema.services.connections.pipeline.AirflowConnection;
import org.openmetadata.schema.services.connections.pipeline.MatillionConnection;
import org.openmetadata.schema.services.connections.search.ElasticSearchConnection;
import org.openmetadata.schema.services.connections.storage.GCSConnection;

Expand Down Expand Up @@ -52,6 +53,7 @@ public class ClassConverterFactoryTest {
Workflow.class,
SalesforceConnection.class,
IcebergConnection.class,
MatillionConnection.class,
})
void testClassConverterIsSet(Class<?> clazz) {
assertFalse(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
package org.openmetadata.service.secrets.converter;

import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

import org.junit.jupiter.api.Test;
import org.openmetadata.schema.services.connections.pipeline.MatillionConnection;
import org.openmetadata.schema.services.connections.pipeline.matillion.MatillionDPCAuth;
import org.openmetadata.schema.services.connections.pipeline.matillion.MatillionETLAuth;
import org.openmetadata.schema.utils.JsonUtils;

/**
 * Verifies that the converter registered for {@link MatillionConnection} restores the concrete
 * auth subtype (ETL or DPC) after the connection has been round-tripped through generic JSON,
 * and that a missing connection is passed through untouched.
 */
class MatillionConnectionClassConverterTest {

  private final ClassConverter converter =
      ClassConverterFactory.getConverter(MatillionConnection.class);

  /** Serializes the connection to JSON, reads it back as a generic object, and converts it. */
  private MatillionConnection roundTrip(MatillionConnection source) {
    Object generic = JsonUtils.readValue(JsonUtils.pojoToJson(source), Object.class);
    return (MatillionConnection) converter.convert(generic);
  }

  @Test
  void testConvertsETLAuth() {
    // ETL auth is identified by its hostPort field.
    MatillionConnection source =
        new MatillionConnection()
            .withConnection(new MatillionETLAuth().withHostPort("https://matillion.example.com"));

    MatillionConnection converted = roundTrip(source);

    assertNotNull(converted);
    assertInstanceOf(MatillionETLAuth.class, converted.getConnection());
  }

  @Test
  void testConvertsDPCAuth() {
    // DPC auth is identified by its OAuth2 client credentials.
    MatillionConnection source =
        new MatillionConnection()
            .withConnection(
                new MatillionDPCAuth().withClientId("client-id").withClientSecret("secret"));

    MatillionConnection converted = roundTrip(source);

    assertNotNull(converted);
    assertInstanceOf(MatillionDPCAuth.class, converted.getConnection());
  }

  @Test
  void testNullConnectionDoesNotThrow() {
    // A connection object without any auth config must survive conversion as-is.
    MatillionConnection converted = roundTrip(new MatillionConnection());

    assertNotNull(converted);
    assertNull(converted.getConnection());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
{
  "$id": "https://open-metadata.org/schema/entity/services/connections/pipeline/matillion/matillionDPC.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Matillion DPC Auth Config",
  "description": "Matillion Data Productivity Cloud Auth Config.",
  "javaType": "org.openmetadata.schema.services.connections.pipeline.matillion.MatillionDPCAuth",
  "type": "object",
  "properties": {
    "type": {
      "type": "string",
      "enum": [
        "MatillionDPC"
      ],
      "default": "MatillionDPC"
    },
    "clientId": {
      "title": "Client ID",
      "description": "OAuth2 Client ID for Matillion DPC authentication.",
      "type": "string"
    },
    "clientSecret": {
      "title": "Client Secret",
      "description": "OAuth2 Client Secret for Matillion DPC authentication.",
      "type": "string",
      "format": "password"
    },
    "region": {
      "title": "Region",
      "description": "Matillion DPC region. Determines the API base URL.",
      "type": "string",
      "enum": [
        "us1",
        "eu1"
      ],
      "default": "us1"
    },
    "personalAccessToken": {
      "title": "Personal Access Token",
      "description": "Personal Access Token for Matillion DPC. Alternative to OAuth2 Client Credentials.",
      "type": "string",
      "format": "password"
    }
  },
  "required": [],
  "anyOf": [
    {
      "required": [
        "personalAccessToken"
      ]
    },
    {
      "required": [
        "clientId",
        "clientSecret"
      ]
    }
  ],
  "additionalProperties": false
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@
"oneOf": [
{
"$ref": "matillion/matillionETL.json"
},
{
"$ref": "matillion/matillionDPC.json"
}
]
},
Expand All @@ -36,10 +39,18 @@
"$ref": "../../../../type/filterPattern.json#/definitions/filterPattern",
"title": "Default Pipeline Filter Pattern"
},
"lineageLookbackDays": {
"title": "Lineage Lookback Days",
"description": "Number of days to look back when fetching lineage events from Matillion DPC OpenLineage API.",
"type": "integer",
"default": 30,
"minimum": 1,
"maximum": 365
},
Comment on lines 28 to +49
Copy link

Copilot AI Mar 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New schema fields (MatillionDPC auth option and lineageLookbackDays) aren’t covered by the existing configuration parsing tests for Matillion. Adding a unit test that parses a valid DPC config (PAT and/or client credentials) and asserts validation failures for missing credentials / out-of-range lineageLookbackDays would prevent regressions in schema-to-model generation and validation behavior.

Copilot uses AI. Check for mistakes.
Comment on lines +42 to +49
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💡 Quality: lineageLookbackDays is DPC-specific but on shared connection

The lineageLookbackDays property is placed at the top-level matillionConnection.json schema, meaning it will be visible for both ETL and DPC connection types. However, its description explicitly says it's for the "Matillion DPC OpenLineage API." If ETL connections don't use lineage lookback, this could confuse users configuring an ETL connection. Consider either:

  1. Moving it inside the matillionDPC.json schema, or
  2. Updating the description to be generic if it applies to both types.

Suggested fix:

Either move lineageLookbackDays into matillionDPC.json, or update the description to not mention DPC specifically if it applies to both connection types.

Was this helpful? React with 👍 / 👎 | Reply gitar fix to apply this suggestion

"supportsMetadataExtraction": {
"title": "Supports Metadata Extraction",
"$ref": "../connectionBasicType.json#/definitions/supportsMetadataExtraction"
}
},
"additionalProperties": false
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1959,6 +1959,11 @@ export interface ConfigObject {
 * List of IDs of your DBT cloud projects separated by comma `,`
*/
projectIds?: string[];
/**
* Number of days to look back when fetching lineage events from Matillion DPC OpenLineage
* API.
*/
lineageLookbackDays?: number;
/**
* The name of your azure data factory.
*/
Expand Down Expand Up @@ -3637,6 +3642,8 @@ export interface GCPImpersonateServiceAccountValues {
*
* Matillion ETL Auth Config.
*
* Matillion Data Productivity Cloud Auth Config.
*
* Choose between mysql and postgres connection for alation database
*/
export interface ConfigConnection {
Expand Down Expand Up @@ -3806,6 +3813,22 @@ export interface ConfigConnection {
*/
databaseMode?: string;
supportsViewLineageExtraction?: boolean;
/**
* OAuth2 Client ID for Matillion DPC authentication.
*/
clientId?: string;
/**
* OAuth2 Client Secret for Matillion DPC authentication.
*/
clientSecret?: string;
/**
* Personal Access Token for Matillion DPC. Alternative to OAuth2 Client Credentials.
*/
personalAccessToken?: string;
/**
* Matillion DPC region. Determines the API base URL.
*/
region?: Region;
}

/**
Expand Down Expand Up @@ -3836,6 +3859,14 @@ export enum Provider {
LDAP = "ldap",
}

/**
 * Matillion DPC region. Determines the API base URL.
 *
 * NOTE(review): generated from the `region` enum ("us1", "eu1") in matillionDPC.json —
 * regenerate from the schema rather than editing by hand.
 */
export enum Region {
    Eu1 = "eu1",
    Us1 = "us1",
}

/**
* Storage config to store sample data
*/
Expand Down Expand Up @@ -3998,6 +4029,7 @@ export enum SSLMode {
*/
export enum ConnectionType {
Backend = "Backend",
MatillionDPC = "MatillionDPC",
MatillionETL = "MatillionETL",
Mysql = "Mysql",
Postgres = "Postgres",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ export interface ConfigObject {
*
* Matillion Auth Configuration
*/
connection?: MetadataDatabaseConnection;
connection?: ConnectionClass;
/**
* Pipeline Service Management/UI URI.
*
Expand Down Expand Up @@ -313,6 +313,11 @@ export interface ConfigObject {
 * List of IDs of your DBT cloud projects separated by comma `,`
*/
projectIds?: string[];
/**
* Number of days to look back when fetching lineage events from Matillion DPC OpenLineage
* API.
*/
lineageLookbackDays?: number;
/**
* Available sources to fetch metadata.
*/
Expand Down Expand Up @@ -704,8 +709,10 @@ export interface AzureCredentials {
* Matillion Auth Configuration
*
* Matillion ETL Auth Config.
*
* Matillion Data Productivity Cloud Auth Config.
*/
export interface MetadataDatabaseConnection {
export interface ConnectionClass {
/**
* Regex exclude pipelines.
*/
Expand Down Expand Up @@ -825,6 +832,22 @@ export interface MetadataDatabaseConnection {
*/
password?: string;
supportsViewLineageExtraction?: boolean;
/**
* OAuth2 Client ID for Matillion DPC authentication.
*/
clientId?: string;
/**
* OAuth2 Client Secret for Matillion DPC authentication.
*/
clientSecret?: string;
/**
* Personal Access Token for Matillion DPC. Alternative to OAuth2 Client Credentials.
*/
personalAccessToken?: string;
/**
* Matillion DPC region. Determines the API base URL.
*/
region?: Region;
}

/**
Expand Down Expand Up @@ -875,6 +898,14 @@ export interface FilterPattern {
includes?: string[];
}

/**
 * Matillion DPC region. Determines the API base URL.
 *
 * NOTE(review): generated from the `region` enum ("us1", "eu1") in matillionDPC.json —
 * regenerate from the schema rather than editing by hand.
 */
export enum Region {
    Eu1 = "eu1",
    Us1 = "us1",
}

/**
* Storage config to store sample data
*/
Expand Down Expand Up @@ -990,6 +1021,7 @@ export enum SSLMode {
*/
export enum Type {
Backend = "Backend",
MatillionDPC = "MatillionDPC",
MatillionETL = "MatillionETL",
Mysql = "Mysql",
Postgres = "Postgres",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4716,6 +4716,11 @@ export interface ConfigObject {
 * List of IDs of your DBT cloud projects separated by comma `,`
*/
projectIds?: string[];
/**
* Number of days to look back when fetching lineage events from Matillion DPC OpenLineage
* API.
*/
lineageLookbackDays?: number;
/**
* The name of your azure data factory.
*/
Expand Down Expand Up @@ -5927,6 +5932,8 @@ export interface ConfigSourceConnection {
* Matillion Auth Configuration
*
* Matillion ETL Auth Config.
*
* Matillion Data Productivity Cloud Auth Config.
*/
export interface ConfigConnection {
/**
Expand Down Expand Up @@ -6095,6 +6102,22 @@ export interface ConfigConnection {
*/
databaseMode?: string;
supportsViewLineageExtraction?: boolean;
/**
* OAuth2 Client ID for Matillion DPC authentication.
*/
clientId?: string;
/**
* OAuth2 Client Secret for Matillion DPC authentication.
*/
clientSecret?: string;
/**
* Personal Access Token for Matillion DPC. Alternative to OAuth2 Client Credentials.
*/
personalAccessToken?: string;
/**
* Matillion DPC region. Determines the API base URL.
*/
region?: Region;
}

/**
Expand Down Expand Up @@ -6125,6 +6148,14 @@ export enum Provider {
LDAP = "ldap",
}

/**
 * Matillion DPC region. Determines the API base URL.
 *
 * NOTE(review): generated from the `region` enum ("us1", "eu1") in matillionDPC.json —
 * regenerate from the schema rather than editing by hand.
 */
export enum Region {
    Eu1 = "eu1",
    Us1 = "us1",
}

/**
* Storage config to store sample data
*/
Expand Down Expand Up @@ -6287,6 +6318,7 @@ export enum SSLMode {
*/
export enum ConnectionType {
Backend = "Backend",
MatillionDPC = "MatillionDPC",
MatillionETL = "MatillionETL",
Mysql = "Mysql",
Postgres = "Postgres",
Expand Down
Loading
Loading