morningman commented on code in PR #14978:
URL: https://github.com/apache/doris/pull/14978#discussion_r1051608594
##########
fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogProperty.java:
##########
@@ -17,88 +17,108 @@ package org.apache.doris.datasource;
 
-import org.apache.doris.catalog.HdfsResource;
+import org.apache.doris.catalog.Env;
+import org.apache.doris.catalog.Resource;
 import org.apache.doris.catalog.S3Resource;
+import org.apache.doris.common.UserException;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
 import org.apache.doris.persist.gson.GsonUtils;
 
-import com.google.common.collect.Maps;
 import com.google.gson.annotations.SerializedName;
 import lombok.Data;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
+import java.util.Optional;
 
 /**
  * CatalogProperty to store the properties for catalog.
  */
 @Data
 public class CatalogProperty implements Writable {
+    private static final Logger LOG = LogManager.getLogger(CatalogProperty.class);
+
+    @SerializedName(value = "resource")
+    private String resource;
     @SerializedName(value = "properties")
-    private Map<String, String> properties = Maps.newHashMap();
+    private Map<String, String> properties;
 
-    public String getOrDefault(String key, String defaultVal) {
-        return properties.getOrDefault(key, defaultVal);
+    private volatile Resource catalogResource = null;
+
+    public CatalogProperty(String resource, Map<String, String> properties) {
+        this.resource = resource;
+        this.properties = properties;
     }
 
-    // get all properties with dfs.* hadoop.* yarn.* hive.*
-    // besides dfs.* and hadoop.username, we need other properties when enable kerberos
-    public Map<String, String> getHdfsProperties() {
-        Map<String, String> dfsProperties = Maps.newHashMap();
-        for (Map.Entry<String, String> entry : properties.entrySet()) {
-            if (entry.getKey().startsWith(HdfsResource.HADOOP_FS_PREFIX)
-                    || entry.getKey().startsWith(HdfsResource.HADOOP_PREFIX)
-                    || entry.getKey().startsWith(HdfsResource.HIVE_PREFIX)
-                    || entry.getKey().startsWith(HdfsResource.YARN_PREFIX)) {
-                dfsProperties.put(entry.getKey(), entry.getValue());
+    private Resource catalogResource() throws UserException {
+        if (catalogResource == null) {
+            synchronized (this) {
+                if (catalogResource == null) {
+                    catalogResource = Optional.ofNullable(Env.getCurrentEnv().getResourceMgr().getResource(resource))
+                            .orElseThrow(() -> new UserException("Resource doesn't exist: " + resource));
+                }
             }
         }
-        return dfsProperties;
+        return catalogResource;
     }
 
-    // todo: remove and use S3Resource
-    public Map<String, String> getS3Properties() {
-        Map<String, String> s3Properties = Maps.newHashMap();
-        if (properties.containsKey(S3Resource.S3_ACCESS_KEY)) {
-            s3Properties.put("fs.s3a.access.key", properties.get(S3Resource.S3_ACCESS_KEY));
-            s3Properties.put(S3Resource.S3_ACCESS_KEY, properties.get(S3Resource.S3_ACCESS_KEY));
-        }
-        if (properties.containsKey(S3Resource.S3_SECRET_KEY)) {
-            s3Properties.put("fs.s3a.secret.key", properties.get(S3Resource.S3_SECRET_KEY));
-            s3Properties.put(S3Resource.S3_SECRET_KEY, properties.get(S3Resource.S3_SECRET_KEY));
-        }
-        if (properties.containsKey(S3Resource.S3_ENDPOINT)) {
-            s3Properties.put("fs.s3a.endpoint", properties.get(S3Resource.S3_ENDPOINT));
-            s3Properties.put(S3Resource.S3_ENDPOINT, properties.get(S3Resource.S3_ENDPOINT));
-        }
-        if (properties.containsKey(S3Resource.S3_REGION)) {
-            s3Properties.put("fs.s3a.endpoint.region", properties.get(S3Resource.S3_REGION));
-            s3Properties.put(S3Resource.S3_REGION, properties.get(S3Resource.S3_REGION));
-        }
-        if (properties.containsKey(S3Resource.S3_MAX_CONNECTIONS)) {
-            s3Properties.put("fs.s3a.connection.maximum", properties.get(S3Resource.S3_MAX_CONNECTIONS));
-            s3Properties.put(S3Resource.S3_MAX_CONNECTIONS, properties.get(S3Resource.S3_MAX_CONNECTIONS));
+    public String getOrDefault(String key, String defaultVal) {
+        if (resource == null) {
+            return properties.getOrDefault(key, defaultVal);
+        } else {
+            try {
+                return catalogResource().getCopiedProperties().getOrDefault(key, defaultVal);
+            } catch (UserException e) {
+                return defaultVal;
+            }
         }
-        if (properties.containsKey(S3Resource.S3_REQUEST_TIMEOUT_MS)) {
-            s3Properties.put("fs.s3a.connection.request.timeout", properties.get(S3Resource.S3_REQUEST_TIMEOUT_MS));
-            s3Properties.put(S3Resource.S3_REQUEST_TIMEOUT_MS, properties.get(S3Resource.S3_REQUEST_TIMEOUT_MS));
+    }
+
+    public Map<String, String> getProperties() {
+        if (resource == null) {
+            return new HashMap<>(properties);
+        } else {
+            try {
+                return catalogResource().getCopiedProperties();
+            } catch (UserException e) {

Review Comment:
   Why throw an exception?
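One possible way to address this review comment is to stop propagating the lookup failure and instead fall back to the locally stored properties with a warning log. The sketch below is only an illustration of that idea, not the PR's code: the resourceLookup function is a stand-in for the Env.getCurrentEnv().getResourceMgr().getResource() call in the diff, and the fallback policy is an assumption.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Minimal sketch of a non-throwing CatalogProperty.getProperties().
// The lookup function is an assumed stand-in for
// Env.getCurrentEnv().getResourceMgr().getResource(resource).getCopiedProperties().
public class CatalogPropertySketch {
    private final String resource;                                      // resource name, may be null
    private final Map<String, String> properties;                       // locally stored properties
    private final Function<String, Map<String, String>> resourceLookup; // returns null when missing

    public CatalogPropertySketch(String resource, Map<String, String> properties,
                                 Function<String, Map<String, String>> resourceLookup) {
        this.resource = resource;
        this.properties = new HashMap<>(properties);
        this.resourceLookup = resourceLookup;
    }

    // No checked exception: a missing resource degrades to the local map.
    public Map<String, String> getProperties() {
        if (resource == null) {
            return new HashMap<>(properties);
        }
        Map<String, String> fromResource = resourceLookup.apply(resource);
        if (fromResource == null) {
            // The real class would presumably use LOG.warn(...) here.
            System.err.println("Resource doesn't exist: " + resource + ", falling back to local properties");
            return new HashMap<>(properties);
        }
        return fromResource;
    }

    public String getOrDefault(String key, String defaultVal) {
        return getProperties().getOrDefault(key, defaultVal);
    }

    public static void main(String[] args) {
        CatalogPropertySketch prop = new CatalogPropertySketch(
                "missing_resource", Map.of("type", "hms"), name -> null);
        System.out.println(prop.getOrDefault("type", "unknown")); // prints "hms"
    }
}
```

The trade-off: callers always get a usable map, but a mistyped resource name fails silently apart from the log line, which may be why the PR surfaces it as an exception instead.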
##########
fe/fe-core/src/main/java/org/apache/doris/catalog/Resource.java:
##########
@@ -78,6 +107,26 @@ public static Resource fromStmt(CreateResourceStmt stmt) throws DdlException {
         return resource;
     }
 
+    public synchronized boolean removeReference(String referenceName, ReferenceType type) {
+        String fullName = referenceName + REFERENCE_SPLIT + type.name();
+        if (references.remove(fullName) != null) {
+            LOG.info("Reference(type={}, name={}) is removed from resource {}, current set: {}",
+                    type, referenceName, name, references);
+            return true;
+        }
+        return false;
+    }
+
+    public synchronized boolean addReference(String referenceName, ReferenceType type) throws AnalysisException {

Review Comment:
   Why throw AnalysisException? I think we should not throw any exception here.
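Following the reviewer's suggestion, addReference() could report a duplicate reference through its boolean return value, mirroring removeReference() in the diff above. The sketch below is illustrative only and not the PR's code: ReferenceType is reduced to a placeholder enum, the REFERENCE_SPLIT value is assumed, and the Map-backed references field is inferred from the references.remove() call in the diff.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Sketch of an addReference() that needs no checked exception.
public class ResourceSketch {
    enum ReferenceType { CATALOG, TABLE }               // placeholder for Doris' ReferenceType

    private static final String REFERENCE_SPLIT = "@";  // assumed separator value
    private final String name;
    private final Map<String, ReferenceType> references = new ConcurrentHashMap<>();

    public ResourceSketch(String name) {
        this.name = name;
    }

    // Returns true only when the reference was newly added.
    public synchronized boolean addReference(String referenceName, ReferenceType type) {
        String fullName = referenceName + REFERENCE_SPLIT + type.name();
        if (references.putIfAbsent(fullName, type) == null) {
            System.out.printf("Reference(type=%s, name=%s) is added to resource %s, current set: %s%n",
                    type, referenceName, name, references.keySet());
            return true;
        }
        return false; // already referenced; the caller decides whether that is an error
    }

    public static void main(String[] args) {
        ResourceSketch res = new ResourceSketch("s3_res");
        System.out.println(res.addReference("hive_catalog", ReferenceType.CATALOG)); // true
        System.out.println(res.addReference("hive_catalog", ReferenceType.CATALOG)); // false
    }
}
```

With this shape, a duplicate reference becomes a normal condition the caller can check, and genuinely exceptional states (for example, a resource being dropped concurrently) stay with whoever owns the resource lifecycle.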