This is an automated email from the ASF dual-hosted git repository.

jiafengzheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
     new 18433d7105 Spark load import kerberos parameter modification (#12924)
18433d7105 is described below

commit 18433d710549ff8ba54afed74e6be6795770a8b3
Author: caoliang-web <71004656+caoliang-...@users.noreply.github.com>
AuthorDate: Mon Sep 26 12:24:43 2022 +0800

    Spark load import kerberos parameter modification (#12924)
---
 .../import/import-way/spark-load-manual.md | 30 +++++++++++-----------
 .../import/import-way/spark-load-manual.md | 30 +++++++++++-----------
 2 files changed, 30 insertions(+), 30 deletions(-)

diff --git a/docs/en/docs/data-operate/import/import-way/spark-load-manual.md b/docs/en/docs/data-operate/import/import-way/spark-load-manual.md
index 071d0c3401..bd66db6b6f 100644
--- a/docs/en/docs/data-operate/import/import-way/spark-load-manual.md
+++ b/docs/en/docs/data-operate/import/import-way/spark-load-manual.md
@@ -154,10 +154,10 @@ PROPERTIES
     working_dir = path,
     broker = broker_name,
     broker.property_key = property_value,
-    hadoop.security.authentication = kerberos,
-    kerberos_principal = do...@your.com,
-    kerberos_keytab = /home/doris/my.keytab
-    kerberos_keytab_content = ASDOWHDLAWIDJHWLDKSALDJSDIWALD
+    broker.hadoop.security.authentication = kerberos,
+    broker.kerberos_principal = do...@your.com,
+    broker.kerberos_keytab = /home/doris/my.keytab
+    broker.kerberos_keytab_content = ASDOWHDLAWIDJHWLDKSALDJSDIWALD
 )

 -- drop spark resource
@@ -194,10 +194,10 @@ REVOKE USAGE_PRIV ON RESOURCE resource_name FROM ROLE role_name
 - Other parameters are optional, refer to `http://spark.apache.org/docs/latest/configuration.html`
 - `working_dir`: directory used by ETL. Spark is required when used as an ETL resource. For example: `hdfs://host:port/tmp/doris`.
-- `hadoop.security.authentication`: Specify the authentication method as kerberos.
-- `kerberos_principal`: Specify the principal of kerberos.
-- `kerberos_keytab`: Specify the path to the keytab file for kerberos. The file must be an absolute path to a file on the server where the broker process is located. And can be accessed by the Broker process.
-- `kerberos_keytab_content`: Specify the content of the keytab file in kerberos after base64 encoding. You can choose one of these with `kerberos_keytab` configuration.
+- `broker.hadoop.security.authentication`: Specify the authentication method as kerberos.
+- `broker.kerberos_principal`: Specify the principal of kerberos.
+- `broker.kerberos_keytab`: Specify the path to the keytab file for kerberos. The file must be an absolute path to a file on the server where the broker process is located. And can be accessed by the Broker process.
+- `broker.kerberos_keytab_content`: Specify the content of the keytab file in kerberos after base64 encoding. You can choose one of these with `broker.kerberos_keytab` configuration.
 - `broker`: the name of the broker. Spark is required when used as an ETL resource. You need to use the 'alter system add broker' command to complete the configuration in advance.
 - `broker.property_key`: the authentication information that the broker needs to specify when reading the intermediate file generated by ETL.
 - `env`: Specify the spark environment variable and support dynamic setting. For example, when the authentication mode of Hadoop is simple, set the Hadoop user name and password
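For reference, the renamed `broker.*` properties above fit together as follows. This is only a sketch and not part of the commit: it shows a Kerberos-enabled Spark resource that uses the base64 `broker.kerberos_keytab_content` variant instead of a keytab path. The resource name `spark0`, the YARN/HDFS addresses, and the principal are placeholders, and the leading properties (`type`, `spark.master`, `spark.submit.deployMode`, `spark.hadoop.yarn.resourcemanager.address`) follow the usual Spark resource setup rather than coming from this diff.

```sql
-- Sketch only, not part of this commit: all names and addresses are placeholders.
CREATE EXTERNAL RESOURCE "spark0"
PROPERTIES
(
    "type" = "spark",
    "spark.master" = "yarn",
    "spark.submit.deployMode" = "cluster",
    "spark.hadoop.yarn.resourcemanager.address" = "127.0.0.1:9999",
    "spark.hadoop.fs.defaultFS" = "hdfs://127.0.0.1:10000",
    "working_dir" = "hdfs://127.0.0.1:10000/tmp/doris",
    "broker" = "broker0",
    "broker.hadoop.security.authentication" = "kerberos",
    "broker.kerberos_principal" = "doris@YOUR.COM",
    -- base64-encoded keytab content; set either this or broker.kerberos_keytab, not both
    "broker.kerberos_keytab_content" = "ASDOWHDLAWIDJHWLDKSALDJSDIWALD"
);
```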
@@ -242,10 +242,10 @@ PROPERTIES

 If Spark load accesses Hadoop cluster resources with Kerberos authentication, we only need to specify the following parameters when creating Spark resources:

-- `hadoop.security.authentication`: Specify the authentication method as kerberos.
-- `kerberos_principal`: Specify the principal of kerberos.
-- `kerberos_keytab`: Specify the path to the keytab file for kerberos. The file must be an absolute path to a file on the server where the broker process is located. And can be accessed by the Broker process.
-- `kerberos_keytab_content`: Specify the content of the keytab file in kerberos after base64 encoding. You can choose one of these with `kerberos_keytab` configuration.
+- `broker.hadoop.security.authentication`: Specify the authentication method as kerberos.
+- `broker.kerberos_principal`: Specify the principal of kerberos.
+- `broker.kerberos_keytab`: Specify the path to the keytab file for kerberos. The file must be an absolute path to a file on the server where the broker process is located. And can be accessed by the Broker process.
+- `broker.kerberos_keytab_content`: Specify the content of the keytab file in kerberos after base64 encoding. You can choose one of these with `kerberos_keytab` configuration.

 Example:

@@ -264,9 +264,9 @@ PROPERTIES
     "spark.hadoop.fs.defaultFS" = "hdfs://127.0.0.1:10000",
     "working_dir" = "hdfs://127.0.0.1:10000/tmp/doris",
     "broker" = "broker0",
-    "hadoop.security.authentication" = "kerberos",
-    "kerberos_principal" = "do...@your.com",
-    "kerberos_keytab" = "/home/doris/my.keytab"
+    "broker.hadoop.security.authentication" = "kerberos",
+    "broker.kerberos_principal" = "do...@your.com",
+    "broker.kerberos_keytab" = "/home/doris/my.keytab"
 );
 ```
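The hunk headers above quote the resource-privilege statements from the surrounding manual. As a usage note, again only a sketch and not part of the commit: before submitting a Spark load against the resource, a user needs USAGE_PRIV on it. `spark0` and `user0` are placeholder names.

```sql
-- Sketch only: grant a user permission to use the Spark resource (placeholder names).
GRANT USAGE_PRIV ON RESOURCE "spark0" TO "user0"@"%";

-- The matching revoke, mirroring the REVOKE form quoted in the hunk headers.
REVOKE USAGE_PRIV ON RESOURCE "spark0" FROM "user0"@"%";
```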
diff --git a/docs/zh-CN/docs/data-operate/import/import-way/spark-load-manual.md b/docs/zh-CN/docs/data-operate/import/import-way/spark-load-manual.md
index 28d7eac8b7..43e30139b8 100644
--- a/docs/zh-CN/docs/data-operate/import/import-way/spark-load-manual.md
+++ b/docs/zh-CN/docs/data-operate/import/import-way/spark-load-manual.md
@@ -127,10 +127,10 @@ PROPERTIES
     working_dir = path,
     broker = broker_name,
     broker.property_key = property_value,
-    hadoop.security.authentication = kerberos,
-    kerberos_principal = do...@your.com,
-    kerberos_keytab = /home/doris/my.keytab
-    kerberos_keytab_content = ASDOWHDLAWIDJHWLDKSALDJSDIWALD
+    broker.hadoop.security.authentication = kerberos,
+    broker.kerberos_principal = do...@your.com,
+    broker.kerberos_keytab = /home/doris/my.keytab
+    broker.kerberos_keytab_content = ASDOWHDLAWIDJHWLDKSALDJSDIWALD
 )

 -- drop spark resource
@@ -162,10 +162,10 @@ REVOKE USAGE_PRIV ON RESOURCE resource_name FROM ROLE role_name
 - `spark.hadoop.fs.defaultFS`: required when master is yarn.
 - Other parameters are optional, refer to http://spark.apache.org/docs/latest/configuration.html
 - `working_dir`: directory used by ETL. Required when Spark is used as an ETL resource. For example: hdfs://host:port/tmp/doris.
-- `hadoop.security.authentication`: Specify the authentication method as kerberos.
-- `kerberos_principal`: Specify the principal of kerberos.
-- `kerberos_keytab`: Specify the path to the kerberos keytab file. The file must be an absolute path to a file on the server where the Broker process is located, and must be accessible to the Broker process.
-- `kerberos_keytab_content`: Specify the base64-encoded content of the kerberos keytab file. Choose either this or the `kerberos_keytab` configuration.
+- `broker.hadoop.security.authentication`: Specify the authentication method as kerberos.
+- `broker.kerberos_principal`: Specify the principal of kerberos.
+- `broker.kerberos_keytab`: Specify the path to the kerberos keytab file. The file must be an absolute path to a file on the server where the Broker process is located, and must be accessible to the Broker process.
+- `broker.kerberos_keytab_content`: Specify the base64-encoded content of the kerberos keytab file. Choose either this or the `broker.kerberos_keytab` configuration.
 - `broker`: the broker name. Required when Spark is used as an ETL resource. The configuration must be completed in advance with the `ALTER SYSTEM ADD BROKER` command.
 - `broker.property_key`: authentication information and other properties that the broker needs when reading the intermediate files generated by ETL.
 - `env`: Specify Spark environment variables; dynamic setting is supported. For example, when the Hadoop authentication mode is simple, set the Hadoop user name and password
@@ -210,10 +210,10 @@ PROPERTIES

 If Spark load accesses Hadoop cluster resources with Kerberos authentication, we only need to specify the following parameters when creating the Spark resource:

-- `hadoop.security.authentication`: Specify the authentication method as kerberos.
-- `kerberos_principal`: Specify the principal of kerberos.
-- `kerberos_keytab`: Specify the path to the kerberos keytab file. The file must be an absolute path to a file on the server where the Broker process is located, and must be accessible to the Broker process.
-- `kerberos_keytab_content`: Specify the base64-encoded content of the kerberos keytab file. Choose either this or the `kerberos_keytab` configuration.
+- `broker.hadoop.security.authentication`: Specify the authentication method as kerberos.
+- `broker.kerberos_principal`: Specify the principal of kerberos.
+- `broker.kerberos_keytab`: Specify the path to the kerberos keytab file. The file must be an absolute path to a file on the server where the Broker process is located, and must be accessible to the Broker process.
+- `broker.kerberos_keytab_content`: Specify the base64-encoded content of the kerberos keytab file. Choose either this or the `kerberos_keytab` configuration.

 Example:

@@ -232,9 +232,9 @@ PROPERTIES
     "spark.hadoop.fs.defaultFS" = "hdfs://127.0.0.1:10000",
     "working_dir" = "hdfs://127.0.0.1:10000/tmp/doris",
     "broker" = "broker0",
-    "hadoop.security.authentication" = "kerberos",
-    "kerberos_principal" = "do...@your.com",
-    "kerberos_keytab" = "/home/doris/my.keytab"
+    "broker.hadoop.security.authentication" = "kerberos",
+    "broker.kerberos_principal" = "do...@your.com",
+    "broker.kerberos_keytab" = "/home/doris/my.keytab"
 );
 ```

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org