Create Connection
datazone_create_connection
Creates a new connection
Description
Creates a new connection. In Amazon DataZone, a connection enables you to connect your resources (domains, projects, and environments) to external resources and services.
Usage
datazone_create_connection(awsLocation, clientToken, description,
domainIdentifier, environmentIdentifier, name, props)
Arguments
awsLocation
The location where the connection is created.
clientToken
A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.
description
A connection description.
domainIdentifier
[required] The ID of the domain where the connection is created.
environmentIdentifier
[required] The ID of the environment where the connection is created.
name
[required] The connection name.
props
The connection props: the type-specific properties (for example, Athena, Glue, Redshift, Spark EMR, or Spark Glue settings) used to configure the connection. A minimal call using these arguments is sketched after this list.
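As a sketch of how these arguments fit together, the call below creates an Athena connection. The client constructor, domain and environment IDs, connection name, and workgroup are placeholder assumptions, not values defined on this page.

# Minimal sketch: create an Athena connection in an existing environment.
# `paws::datazone()` is assumed to return a configured DataZone client;
# all identifiers below are hypothetical.
svc <- paws::datazone()

resp <- svc$create_connection(
  domainIdentifier = "dzd_example1234",       # hypothetical domain ID
  environmentIdentifier = "env_example5678",  # hypothetical environment ID
  name = "my-athena-connection",
  description = "Athena connection for analytics queries",
  clientToken = "unique-request-token-001",   # makes retries idempotent
  props = list(
    athenaProperties = list(
      workgroupName = "primary"               # hypothetical Athena workgroup
    )
  )
)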
Value
A list with the following syntax:
list(
connectionId = "string",
description = "string",
domainId = "string",
domainUnitId = "string",
environmentId = "string",
name = "string",
physicalEndpoints = list(
list(
awsLocation = list(
accessRole = "string",
awsAccountId = "string",
awsRegion = "string",
iamConnectionId = "string"
),
glueConnection = list(
athenaProperties = list(
"string"
),
authenticationConfiguration = list(
authenticationType = "BASIC"|"OAUTH2"|"CUSTOM",
oAuth2Properties = list(
authorizationCodeProperties = list(
authorizationCode = "string",
redirectUri = "string"
),
oAuth2ClientApplication = list(
aWSManagedClientApplicationReference = "string",
userManagedClientApplicationClientId = "string"
),
oAuth2Credentials = list(
accessToken = "string",
jwtToken = "string",
refreshToken = "string",
userManagedClientApplicationClientSecret = "string"
),
oAuth2GrantType = "AUTHORIZATION_CODE"|"CLIENT_CREDENTIALS"|"JWT_BEARER",
tokenUrl = "string",
tokenUrlParametersMap = list(
"string"
)
),
secretArn = "string"
),
compatibleComputeEnvironments = list(
"SPARK"|"ATHENA"|"PYTHON"
),
connectionProperties = list(
"string"
),
connectionSchemaVersion = 123,
connectionType = "ATHENA"|"BIGQUERY"|"DATABRICKS"|"DOCUMENTDB"|"DYNAMODB"|"HYPERPOD"|"IAM"|"MYSQL"|"OPENSEARCH"|"ORACLE"|"POSTGRESQL"|"REDSHIFT"|"SAPHANA"|"SNOWFLAKE"|"SPARK"|"SQLSERVER"|"TERADATA"|"VERTICA"|"WORKFLOWS_MWAA",
creationTime = as.POSIXct(
"2015-01-01"
),
description = "string",
lastConnectionValidationTime = as.POSIXct(
"2015-01-01"
),
lastUpdatedBy = "string",
lastUpdatedTime = as.POSIXct(
"2015-01-01"
),
matchCriteria = list(
"string"
),
name = "string",
physicalConnectionRequirements = list(
availabilityZone = "string",
securityGroupIdList = list(
"string"
),
subnetId = "string",
subnetIdList = list(
"string"
)
),
pythonProperties = list(
"string"
),
sparkProperties = list(
"string"
),
status = "CREATING"|"CREATE_FAILED"|"DELETING"|"DELETE_FAILED"|"READY"|"UPDATING"|"UPDATE_FAILED"|"DELETED",
statusReason = "string"
),
glueConnectionName = "string",
host = "string",
port = 123,
protocol = "ATHENA"|"GLUE_INTERACTIVE_SESSION"|"HTTPS"|"JDBC"|"LIVY"|"ODBC"|"PRISM",
stage = "string"
)
),
projectId = "string",
props = list(
athenaProperties = list(
workgroupName = "string"
),
glueProperties = list(
errorMessage = "string",
status = "CREATING"|"CREATE_FAILED"|"DELETING"|"DELETE_FAILED"|"READY"|"UPDATING"|"UPDATE_FAILED"|"DELETED"
),
hyperPodProperties = list(
clusterArn = "string",
clusterName = "string",
orchestrator = "EKS"|"SLURM"
),
iamProperties = list(
environmentId = "string",
glueLineageSyncEnabled = TRUE|FALSE
),
redshiftProperties = list(
credentials = list(
secretArn = "string",
usernamePassword = list(
password = "string",
username = "string"
)
),
databaseName = "string",
isProvisionedSecret = TRUE|FALSE,
jdbcIamUrl = "string",
jdbcUrl = "string",
lineageSync = list(
enabled = TRUE|FALSE,
lineageJobId = "string",
schedule = list(
schedule = "string"
)
),
redshiftTempDir = "string",
status = "CREATING"|"CREATE_FAILED"|"DELETING"|"DELETE_FAILED"|"READY"|"UPDATING"|"UPDATE_FAILED"|"DELETED",
storage = list(
clusterName = "string",
workgroupName = "string"
)
),
sparkEmrProperties = list(
computeArn = "string",
credentials = list(
password = "string",
username = "string"
),
credentialsExpiration = as.POSIXct(
"2015-01-01"
),
governanceType = "AWS_MANAGED"|"USER_MANAGED",
instanceProfileArn = "string",
javaVirtualEnv = "string",
livyEndpoint = "string",
logUri = "string",
pythonVirtualEnv = "string",
runtimeRole = "string",
trustedCertificatesS3Uri = "string"
),
sparkGlueProperties = list(
additionalArgs = list(
connection = "string"
),
glueConnectionName = "string",
glueVersion = "string",
idleTimeout = 123,
javaVirtualEnv = "string",
numberOfWorkers = 123,
pythonVirtualEnv = "string",
workerType = "string"
)
),
type = "ATHENA"|"BIGQUERY"|"DATABRICKS"|"DOCUMENTDB"|"DYNAMODB"|"HYPERPOD"|"IAM"|"MYSQL"|"OPENSEARCH"|"ORACLE"|"POSTGRESQL"|"REDSHIFT"|"SAPHANA"|"SNOWFLAKE"|"SPARK"|"SQLSERVER"|"TERADATA"|"VERTICA"|"WORKFLOWS_MWAA"
)
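For illustration, the returned value is an ordinary R list and can be inspected field by field; the snippet below assumes `resp` holds the result of a successful call.

# Sketch: reading fields from the returned list (assumes `resp` holds the
# result of a datazone_create_connection() call).
resp$connectionId                                    # ID of the new connection
resp$type                                            # e.g. "ATHENA"
resp$physicalEndpoints[[1]]$awsLocation$awsRegion    # region of the first endpoint
resp$props$athenaProperties$workgroupName            # set for Athena connections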
Request syntax
svc$create_connection(
awsLocation = list(
accessRole = "string",
awsAccountId = "string",
awsRegion = "string",
iamConnectionId = "string"
),
clientToken = "string",
description = "string",
domainIdentifier = "string",
environmentIdentifier = "string",
name = "string",
props = list(
athenaProperties = list(
workgroupName = "string"
),
glueProperties = list(
glueConnectionInput = list(
athenaProperties = list(
"string"
),
authenticationConfiguration = list(
authenticationType = "BASIC"|"OAUTH2"|"CUSTOM",
basicAuthenticationCredentials = list(
password = "string",
userName = "string"
),
customAuthenticationCredentials = list(
"string"
),
kmsKeyArn = "string",
oAuth2Properties = list(
authorizationCodeProperties = list(
authorizationCode = "string",
redirectUri = "string"
),
oAuth2ClientApplication = list(
aWSManagedClientApplicationReference = "string",
userManagedClientApplicationClientId = "string"
),
oAuth2Credentials = list(
accessToken = "string",
jwtToken = "string",
refreshToken = "string",
userManagedClientApplicationClientSecret = "string"
),
oAuth2GrantType = "AUTHORIZATION_CODE"|"CLIENT_CREDENTIALS"|"JWT_BEARER",
tokenUrl = "string",
tokenUrlParametersMap = list(
"string"
)
),
secretArn = "string"
),
connectionProperties = list(
"string"
),
connectionType = "SNOWFLAKE"|"BIGQUERY"|"DOCUMENTDB"|"DYNAMODB"|"MYSQL"|"OPENSEARCH"|"ORACLE"|"POSTGRESQL"|"REDSHIFT"|"SAPHANA"|"SQLSERVER"|"TERADATA"|"VERTICA",
description = "string",
matchCriteria = "string",
name = "string",
physicalConnectionRequirements = list(
availabilityZone = "string",
securityGroupIdList = list(
"string"
),
subnetId = "string",
subnetIdList = list(
"string"
)
),
pythonProperties = list(
"string"
),
sparkProperties = list(
"string"
),
validateCredentials = TRUE|FALSE,
validateForComputeEnvironments = list(
"SPARK"|"ATHENA"|"PYTHON"
)
)
),
hyperPodProperties = list(
clusterName = "string"
),
iamProperties = list(
glueLineageSyncEnabled = TRUE|FALSE
),
redshiftProperties = list(
credentials = list(
secretArn = "string",
usernamePassword = list(
password = "string",
username = "string"
)
),
databaseName = "string",
host = "string",
lineageSync = list(
enabled = TRUE|FALSE,
schedule = list(
schedule = "string"
)
),
port = 123,
storage = list(
clusterName = "string",
workgroupName = "string"
)
),
sparkEmrProperties = list(
computeArn = "string",
instanceProfileArn = "string",
javaVirtualEnv = "string",
logUri = "string",
pythonVirtualEnv = "string",
runtimeRole = "string",
trustedCertificatesS3Uri = "string"
),
sparkGlueProperties = list(
additionalArgs = list(
connection = "string"
),
glueConnectionName = "string",
glueVersion = "string",
idleTimeout = 123,
javaVirtualEnv = "string",
numberOfWorkers = 123,
pythonVirtualEnv = "string",
workerType = "string"
)
)
)
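As a further sketch, the request syntax above can be filled in to create, for example, a Redshift Serverless connection with lineage sync enabled, reusing the `svc` client from the earlier sketch. The identifiers, secret ARN, cron expression, and workgroup name below are placeholder assumptions.

# Sketch: create a Redshift connection whose credentials live in Secrets Manager.
# Domain/environment IDs, the secret ARN, and the workgroup are placeholders.
resp <- svc$create_connection(
  domainIdentifier = "dzd_example1234",
  environmentIdentifier = "env_example5678",
  name = "my-redshift-connection",
  props = list(
    redshiftProperties = list(
      credentials = list(
        secretArn = "arn:aws:secretsmanager:us-east-1:111122223333:secret:example"
      ),
      databaseName = "dev",
      lineageSync = list(
        enabled = TRUE,
        schedule = list(
          schedule = "cron(0 1 * * ? *)"   # hypothetical daily schedule
        )
      ),
      storage = list(
        workgroupName = "my-redshift-workgroup"
      )
    )
  )
)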