danielcweeks commented on code in PR #6428:
URL: https://github.com/apache/iceberg/pull/6428#discussion_r1063850430


##########
snowflake/src/main/java/org/apache/iceberg/snowflake/SnowflakeCatalog.java:
##########
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iceberg.snowflake;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.apache.iceberg.BaseMetastoreCatalog;
+import org.apache.iceberg.CatalogProperties;
+import org.apache.iceberg.CatalogUtil;
+import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.NoSuchNamespaceException;
+import org.apache.iceberg.hadoop.Configurable;
+import org.apache.iceberg.io.CloseableGroup;
+import org.apache.iceberg.io.FileIO;
+import org.apache.iceberg.jdbc.JdbcClientPool;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.snowflake.entities.SnowflakeIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
public class SnowflakeCatalog extends BaseMetastoreCatalog
    implements Closeable, SupportsNamespaces, Configurable<Object> {
  // Name applied when initialize() is called with a null catalog name.
  public static final String DEFAULT_CATALOG_NAME = "snowflake_catalog";
  // FileIO implementation used unless CatalogProperties.FILE_IO_IMPL overrides it.
  public static final String DEFAULT_FILE_IO_IMPL = "org.apache.iceberg.hadoop.HadoopFileIO";

  private static final Logger LOG = LoggerFactory.getLogger(SnowflakeCatalog.class);

  // Owns the snowflakeClient and fileIO so a single close() releases both.
  private CloseableGroup closeableGroup;
  // Configuration object passed to CatalogUtil.loadFileIO; presumably populated via
  // Configurable#setConf — the setter is not visible in this chunk, confirm elsewhere.
  private Object conf;
  private String catalogName;
  private Map<String, String> catalogProperties;
  private FileIO fileIO;
  private SnowflakeClient snowflakeClient;

  // No-arg constructor; all setup is deferred to initialize(...).
  public SnowflakeCatalog() {}
+
+  @Override
+  public List<TableIdentifier> listTables(Namespace namespace) {
+    LOG.debug("listTables with namespace: {}", namespace);
+    SnowflakeIdentifier scope = 
NamespaceHelpers.getSnowflakeIdentifierForNamespace(namespace);
+    Preconditions.checkArgument(
+        scope.getType() == SnowflakeIdentifier.Type.ROOT
+            || scope.getType() == SnowflakeIdentifier.Type.DATABASE
+            || scope.getType() == SnowflakeIdentifier.Type.SCHEMA,
+        "listTables must be at ROOT, DATABASE, or SCHEMA level; got %s from 
namespace %s",
+        scope,
+        namespace);
+
+    List<SnowflakeIdentifier> sfTables = 
snowflakeClient.listIcebergTables(scope);
+
+    return sfTables.stream()
+        .map(
+            table ->
+                TableIdentifier.of(
+                    table.getDatabaseName(), table.getSchemaName(), 
table.getTableName()))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public boolean dropTable(TableIdentifier identifier, boolean purge) {
+    throw new UnsupportedOperationException(
+        String.format("dropTable not supported; attempted for table '%s'", 
identifier));
+  }
+
+  @Override
+  public void renameTable(TableIdentifier from, TableIdentifier to) {
+    throw new UnsupportedOperationException(
+        String.format("renameTable not supported; attempted from '%s' to 
'%s'", from, to));
+  }
+
+  @Override
+  public void initialize(String name, Map<String, String> properties) {
+    String uri = properties.get(CatalogProperties.URI);
+    Preconditions.checkNotNull(uri, "JDBC connection URI is required");
+    try {
+      // We'll ensure the expected JDBC driver implementation class is 
initialized through
+      // reflection
+      // regardless of which classloader ends up using this 
JdbcSnowflakeClient, but we'll only
+      // warn if the expected driver fails to load, since users may use 
repackaged or custom
+      // JDBC drivers for Snowflake communcation.
+      Class.forName(JdbcSnowflakeClient.EXPECTED_JDBC_IMPL);
+    } catch (ClassNotFoundException cnfe) {
+      LOG.warn(
+          "Failed to load expected JDBC SnowflakeDriver - if queries fail by 
failing"
+              + " to find a suitable driver for jdbc:snowflake:// URIs, you 
must add the Snowflake "
+              + " JDBC driver to your jars/packages",
+          cnfe);
+    }
+    JdbcClientPool connectionPool = new JdbcClientPool(uri, properties);
+
+    String fileIOImpl = DEFAULT_FILE_IO_IMPL;
+    if (properties.containsKey(CatalogProperties.FILE_IO_IMPL)) {
+      fileIOImpl = properties.get(CatalogProperties.FILE_IO_IMPL);
+    }
+
+    initialize(
+        name,
+        new JdbcSnowflakeClient(connectionPool),
+        CatalogUtil.loadFileIO(fileIOImpl, properties, conf),
+        properties);
+  }
+
+  /**
+   * Initialize using caller-supplied SnowflakeClient and FileIO.
+   *
+   * @param name The name of the catalog, defaults to "snowflake_catalog"
+   * @param snowflakeClient The client encapsulating network communication 
with Snowflake
+   * @param fileIO The {@link FileIO} to use for table operations
+   * @param properties The catalog options to use and propagate to dependencies
+   */
+  @SuppressWarnings("checkstyle:HiddenField")
+  public void initialize(
+      String name, SnowflakeClient snowflakeClient, FileIO fileIO, Map<String, 
String> properties) {
+    Preconditions.checkArgument(null != snowflakeClient, "snowflakeClient must 
be non-null");
+    Preconditions.checkArgument(null != fileIO, "fileIO must be non-null");
+    this.catalogName = name == null ? DEFAULT_CATALOG_NAME : name;
+    this.snowflakeClient = snowflakeClient;
+    this.fileIO = fileIO;
+    this.catalogProperties = properties;
+    this.closeableGroup = new CloseableGroup();
+    closeableGroup.addCloseable(snowflakeClient);
+    closeableGroup.addCloseable(fileIO);
+    closeableGroup.setSuppressCloseFailure(true);
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (null != closeableGroup) {
+      closeableGroup.close();
+    }
+  }
+
+  @Override
+  public void createNamespace(Namespace namespace, Map<String, String> 
metadata) {
+    throw new UnsupportedOperationException(
+        String.format("createNamespace not supported; attempted for namespace 
'%s'", namespace));
+  }
+
+  @Override
+  public List<Namespace> listNamespaces(Namespace namespace) {
+    LOG.debug("listNamespaces with namespace: {}", namespace);
+    SnowflakeIdentifier scope = 
NamespaceHelpers.getSnowflakeIdentifierForNamespace(namespace);
+    Preconditions.checkArgument(
+        scope.getType() == SnowflakeIdentifier.Type.ROOT
+            || scope.getType() == SnowflakeIdentifier.Type.DATABASE,
+        "listNamespaces must be at either ROOT or DATABASE level; got %s from 
namespace %s",
+        scope,
+        namespace);
+    List<SnowflakeIdentifier> sfSchemas = snowflakeClient.listSchemas(scope);
+
+    List<Namespace> namespaceList =
+        sfSchemas.stream()
+            .map(
+                schema -> {
+                  Preconditions.checkState(
+                      schema.getType() == SnowflakeIdentifier.Type.SCHEMA,
+                      "Got identifier of type %s from listSchemas for %s",
+                      schema.getType(),
+                      namespace);
+                  return Namespace.of(schema.getDatabaseName(), 
schema.getSchemaName());
+                })
+            .collect(Collectors.toList());
+    return namespaceList;
+  }
+
  /**
   * Returns metadata for the given namespace; currently always an empty map, since no
   * namespace-level metadata is surfaced from Snowflake.
   *
   * <p>NOTE(review): this never verifies that the namespace exists, so the declared
   * {@link NoSuchNamespaceException} is never actually thrown here — confirm whether an
   * existence check is intended.
   */
  @Override
  public Map<String, String> loadNamespaceMetadata(Namespace namespace)
      throws NoSuchNamespaceException {
    LOG.debug("loadNamespaceMetadata with namespace: {}", namespace);
    return ImmutableMap.of();
  }
+
+  @Override
+  public boolean dropNamespace(Namespace namespace) {
+    throw new UnsupportedOperationException(
+        String.format("dropNamespace not supported; attempted for namespace 
'%s'", namespace));
+  }
+
+  @Override
+  public boolean setProperties(Namespace namespace, Map<String, String> 
properties) {
+    throw new UnsupportedOperationException(
+        String.format("setProperties not supported; attempted for namespace 
'%s'", namespace));
+  }
+
+  @Override
+  public boolean removeProperties(Namespace namespace, Set<String> properties) 
{
+    throw new UnsupportedOperationException(
+        String.format("removeProperties not supported; attempted for namespace 
'%s'", namespace));
+  }
+
+  @Override
+  protected TableOperations newTableOps(TableIdentifier tableIdentifier) {
+    return new SnowflakeTableOperations(

Review Comment:
   Eventually we would probably update the underlying FileIO to address the 
cross-bucket issues, but that isn't particularly common for a single table at 
this point.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org
For additional commands, e-mail: issues-h...@iceberg.apache.org

Reply via email to