{"id":8712,"date":"2022-08-23T03:55:34","date_gmt":"2022-08-23T01:55:34","guid":{"rendered":"https:\/\/myoceane.fr\/?p=8712"},"modified":"2022-09-23T08:45:49","modified_gmt":"2022-09-23T06:45:49","slug":"hive-metastore-service-with-azure","status":"publish","type":"post","link":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/","title":{"rendered":"[Hive] Metastore in Azure Databricks"},"content":{"rendered":"<div id=\"fb-root\"><\/div>\n<p style=\"text-align: justify;\">\u5728\u958b\u767c Spark \u8207 Deltalake \u7684\u61c9\u7528\u7684\u6642\u5019\uff0c\u9700\u8981\u5efa\u7acb\u5f88\u591a\u7684 Table \u8207 Database \u7b49\u8cc7\u6e90\uff0c\u9019\u4e9b Table \u7684\u8cc7\u6e90\u7a76\u7adf\u662f\u600e\u9ebc\u7ba1\u7406\u7684\uff1f\u5c31\u662f Hive Metastore \u7684\u89d2\u8272\uff0c\u6211\u5011\u5728\u5f88\u81ea\u7136\u4f7f\u7528 Spark SQL \u7684\u6642\u5019\uff0c\u662f\u5426\u771f\u6b63\u4e86\u89e3\u80cc\u5f8c\u767c\u751f\u4e86\u4ec0\u9ebc\u4e8b\u60c5\uff1f\u672c\u7bc7\u6211\u5011\u7d00\u9304\u5982\u4f55\u5728 Azure Databricks \u4e0a\u9762\u4f7f\u7528\u5ba2\u88fd\u5316\u7684 Hive Metastore\u3002<\/p>\n<h5>\u5229\u7528 Azure SQL Database \u7576\u6210 Databricks \u7684 Metastore (Hive Metastore 1.2.1 \u4ee5\u4e0b) <a href=\"https:\/\/medium.com\/@margauxvanderplaetsen\/connect-azure-sql-db-as-external-hive-metastore-to-azure-databricks-spark-cluster-and-sql-endpoint-e3b9acc50338\">\u9023\u7d50<\/a><\/h5>\n<p style=\"text-align: justify;\">\u6587\u7ae0\u5617\u8a66\u5229\u7528 Azure SQL Database \u7576\u6210\u662f Databricks External \u7684 Metastore\uff0c\u518d\u6309\u7167\u6587\u7ae0\u5148\u8a2d\u5b9a SparkConf \u5982\u4e0b\uff1a<\/p>\n<pre class=\"lang:bash\">spark.sql.hive.metastore.version 1.2.1\nspark.hadoop.javax.jdo.option.ConnectionUserName user@sqlserver\nspark.hadoop.javax.jdo.option.ConnectionURL jdbc:sqlserver:\/\/sqlserver.database.windows.net:1433;database=metastore\nspark.hadoop.javax.jdo.option.ConnectionPassword 
password\nspark.hadoop.javax.jdo.option.ConnectionDriverName com.microsoft.sqlserver.jdbc.SQLServerDriver\nspark.sql.hive.metastore.jars maven<\/pre>\n<p>\u63a5\u8457\u5229\u7528 Notebook \u57f7\u884c CREATE DATABASE test \u537b\u7206\u51fa\u4ee5\u4e0b\u7684\u932f\u8aa4\u8a0a\u606f\uff1a<\/p>\n<pre class=\"lang:bash\">com.databricks.backend.common.rpc.SparkDriverExceptions$SQLExecutionException: org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$2(HiveExternalCatalog.scala:160)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.maybeSynchronized(HiveExternalCatalog.scala:112)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$1(HiveExternalCatalog.scala:150)\n\tat com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:377)\n\tat com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:363)\n\tat com.databricks.spark.util.SparkDatabricksProgressReporter$.withStatusCode(ProgressReporter.scala:34)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:149)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:310)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:228)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:218)\n\tat org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:59)\n\tat org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$hiveCatalog$1(HiveSessionStateBuilder.scala:74)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.externalCatalog$lzycompute(SessionCatalog.scala:544)\n\tat 
org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.externalCatalog(SessionCatalog.scala:544)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.databaseExists(SessionCatalog.scala:763)\n\tat com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.databaseExists(ManagedCatalogSessionCatalog.scala:577)\n\tat com.databricks.sql.managedcatalog.UnityCatalogV2Proxy.$anonfun$namespaceExists$1(UnityCatalogV2Proxy.scala:120)\n\tat com.databricks.sql.managedcatalog.UnityCatalogV2Proxy.$anonfun$namespaceExists$1$adapted(UnityCatalogV2Proxy.scala:120)\n\tat com.databricks.sql.managedcatalog.UnityCatalogV2Proxy.assertSingleNamespace(UnityCatalogV2Proxy.scala:114)\n\tat com.databricks.sql.managedcatalog.UnityCatalogV2Proxy.namespaceExists(UnityCatalogV2Proxy.scala:120)\n\tat org.apache.spark.sql.execution.datasources.v2.CreateNamespaceExec.run(CreateNamespaceExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)\n\tat org.apache.spark.sql.execution.QueryExecution$anonfun$nestedInanonfun$eagerlyExecuteCommands$1$1.$anonfun$applyOrElse$1(QueryExecution.scala:202)\n\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:240)\n\tat org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:388)\n\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:187)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:973)\n\tat org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:142)\n\tat org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:338)\n\tat 
org.apache.spark.sql.execution.QueryExecution$anonfun$nestedInanonfun$eagerlyExecuteCommands$1$1.applyOrElse(QueryExecution.scala:202)\n\tat org.apache.spark.sql.execution.QueryExecution$anonfun$nestedInanonfun$eagerlyExecuteCommands$1$1.applyOrElse(QueryExecution.scala:198)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:591)\n\tat org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:178)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:591)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$super$transformDownWithPruning(LogicalPlan.scala:31)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:268)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:264)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:567)\n\tat org.apache.spark.sql.execution.QueryExecution.$anonfun$eagerlyExecuteCommands$1(QueryExecution.scala:198)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:324)\n\tat org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:198)\n\tat org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:183)\n\tat org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:174)\n\tat org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:237)\n\tat org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:106)\n\tat 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:973)\n\tat org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:103)\n\tat org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:808)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:973)\n\tat org.apache.spark.sql.SparkSession.sql(SparkSession.scala:803)\n\tat org.apache.spark.sql.SQLContext.sql(SQLContext.scala:695)\n\tat com.databricks.backend.daemon.driver.SQLDriverLocal.$anonfun$executeSql$1(SQLDriverLocal.scala:91)\n\tat scala.collection.immutable.List.map(List.scala:293)\n\tat com.databricks.backend.daemon.driver.SQLDriverLocal.executeSql(SQLDriverLocal.scala:37)\n\tat com.databricks.backend.daemon.driver.SQLDriverLocal.repl(SQLDriverLocal.scala:145)\n\tat com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$20(DriverLocal.scala:668)\n\tat com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:41)\n\tat com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$18(DriverLocal.scala:668)\n\tat com.databricks.logging.Log4jUsageLoggingShim$.$anonfun$withAttributionContext$1(Log4jUsageLoggingShim.scala:32)\n\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:94)\n\tat com.databricks.logging.Log4jUsageLoggingShim$.withAttributionContext(Log4jUsageLoggingShim.scala:30)\n\tat com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:283)\n\tat com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:282)\n\tat com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:62)\n\tat com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:318)\n\tat com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:303)\n\tat com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:62)\n\tat 
com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:645)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:622)\n\tat scala.util.Try$.apply(Try.scala:213)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:614)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:533)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:568)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:438)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:381)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:232)\n\tat java.lang.Thread.run(Thread.java:748)\nCaused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1305)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1290)\n\tat org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:619)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:435)\n\tat scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:335)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$retryLocked$1(HiveClientImpl.scala:236)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.synchronizeOnObject(HiveClientImpl.scala:272)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:228)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:315)\n\tat 
org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:435)\n\tat org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1(PoolingHiveClient.scala:321)\n\tat org.apache.spark.sql.hive.client.PoolingHiveClient.$anonfun$databaseExists$1$adapted(PoolingHiveClient.scala:320)\n\tat org.apache.spark.sql.hive.client.PoolingHiveClient.withHiveClient(PoolingHiveClient.scala:149)\n\tat org.apache.spark.sql.hive.client.PoolingHiveClient.databaseExists(PoolingHiveClient.scala:320)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:310)\n\tat scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n\tat com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$2(HiveExternalCatalog.scala:151)\n\t... 81 more\nCaused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\n\tat org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&lt;init&gt;(RetryingMetaStoreClient.java:86)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1301)\n\t... 
99 more\nCaused by: java.lang.reflect.InvocationTargetException\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)\n\tat sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\n\tat java.lang.reflect.Constructor.newInstance(Constructor.java:423)\n\tat org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)\n\t... 105 more\nCaused by: javax.jdo.JDODataStoreException: Required table missing : \"VERSION\" in Catalog \"\" Schema \"\". DataNucleus requires this table to perform its persistence operations. Either your MetaData is incorrect, or you need to enable \"datanucleus.autoCreateTables\"\nNestedThrowables:\norg.datanucleus.store.rdbms.exceptions.MissingTableException: Required table missing : \"VERSION\" in Catalog \"\" Schema \"\". DataNucleus requires this table to perform its persistence operations. 
Either your MetaData is incorrect, or you need to enable \"datanucleus.autoCreateTables\"\n\tat org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:461)\n\tat org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:732)\n\tat org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)\n\tat org.apache.hadoop.hive.metastore.ObjectStore.setMetaStoreSchemaVersion(ObjectStore.java:6773)\n\tat org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6670)\n\tat org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)\n\tat com.sun.proxy.$Proxy86.verifySchema(Unknown Source)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:66)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5768)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&lt;init&gt;(HiveMetaStoreClient.java:199)\n\tat org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&lt;init&gt;(SessionHiveMetaStoreClient.java:74)\n\t... 
110 more\nCaused by: org.datanucleus.store.rdbms.exceptions.MissingTableException: Required table missing : \"VERSION\" in Catalog \"\" Schema \"\". DataNucleus requires this table to perform its persistence operations. Either your MetaData is incorrect, or you need to enable \"datanucleus.autoCreateTables\"\n\tat org.datanucleus.store.rdbms.table.AbstractTable.exists(AbstractTable.java:485)\n\tat org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:3380)\n\tat org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:3190)\n\tat org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2841)\n\tat org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:122)\n\tat org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:1605)\n\tat org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:954)\n\tat org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:679)\n\tat org.datanucleus.store.rdbms.RDBMSStoreManager.getPropertiesForGenerator(RDBMSStoreManager.java:2045)\n\tat org.datanucleus.store.AbstractStoreManager.getStrategyValue(AbstractStoreManager.java:1365)\n\tat org.datanucleus.ExecutionContextImpl.newObjectId(ExecutionContextImpl.java:3827)\n\tat org.datanucleus.state.JDOStateManager.setIdentity(JDOStateManager.java:2571)\n\tat org.datanucleus.state.JDOStateManager.initialiseForPersistentNew(JDOStateManager.java:513)\n\tat org.datanucleus.state.ObjectProviderFactoryImpl.newForPersistentNew(ObjectProviderFactoryImpl.java:232)\n\tat org.datanucleus.ExecutionContextImpl.newObjectProviderForPersistentNew(ExecutionContextImpl.java:1414)\n\tat org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2218)\n\tat org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:2065)\n\tat 
org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1913)\n\tat org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217)\n\tat org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:727)\n\t... 128 more\n\n\tat com.databricks.backend.daemon.driver.SQLDriverLocal.executeSql(SQLDriverLocal.scala:130)\n\tat com.databricks.backend.daemon.driver.SQLDriverLocal.repl(SQLDriverLocal.scala:145)\n\tat com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$20(DriverLocal.scala:668)\n\tat com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:41)\n\tat com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$18(DriverLocal.scala:668)\n\tat com.databricks.logging.Log4jUsageLoggingShim$.$anonfun$withAttributionContext$1(Log4jUsageLoggingShim.scala:32)\n\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:94)\n\tat com.databricks.logging.Log4jUsageLoggingShim$.withAttributionContext(Log4jUsageLoggingShim.scala:30)\n\tat com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:283)\n\tat com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:282)\n\tat com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:62)\n\tat com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:318)\n\tat com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:303)\n\tat com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:62)\n\tat com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:645)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:622)\n\tat scala.util.Try$.apply(Try.scala:213)\n\tat 
com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:614)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:533)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:568)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:438)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:381)\n\tat com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:232)\n\tat java.lang.Thread.run(Thread.java:748)<\/pre>\n<p style=\"text-align: justify;\">\u89e3\u6c7a\u65b9\u6cd5\u662f\u5728 Spark Conf \u88e1\u9762\u518d\u5916\u52a0\u4ee5\u4e0b\u4e09\u500b conf \u8a2d\u5b9a\uff0c\u4e4b\u5f8c\u5c31\u53ef\u4ee5\u6210\u529f\u5275\u5efa Database \u8207 Table \u4e86\u3002<\/p>\n<pre class=\"lang:bash\">datanucleus.autoCreateSchema true\ndatanucleus.fixedDatastore false\ndatanucleus.autoCreateTables true<\/pre>\n<p style=\"text-align: justify;\">\u89c0\u5bdf\u5728 SQL Database \u5167\u90e8\u7684\u8868\u683c\uff0c\u767c\u73fe\u5927\u90e8\u5206\u7684 Tables \u90fd\u6709\u88ab\u81ea\u52d5\u5efa\u51fa\u4f86\uff0c\u53e6\u5916 dbo.VERSION \u9019\u500b\u8868\u683c\u5167\u5b58\u6709\u4e00\u500b SCHEMA VERSION = 1.2.0 \u7684\u7d00\u9304\u3002<\/p>\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"478\" src=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/metastore_with_sql_server-1024x478.png\" alt=\"\" class=\"wp-image-8732\" srcset=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/metastore_with_sql_server-1024x478.png 1024w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/metastore_with_sql_server-300x140.png 300w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/metastore_with_sql_server-768x358.png 768w, 
https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/metastore_with_sql_server-1536x717.png 1536w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/metastore_with_sql_server.png 1920w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n<p>Note: \u770b\u5230\u9019\u500b\u932f\u8aa4\u8a0a\u606f\u7684\u6642\u5019\uff0c\u53c8\u770b\u5230 Azure Databricks \u95dc\u65bc <a href=\"https:\/\/docs.microsoft.com\/en-us\/azure\/databricks\/data\/metastores\/external-hive-metastore\">Hive Meta Version \u8207 Runtimes \u7248\u865f\u7684\u914d\u5c0d<\/a>\uff0c\u4ee5\u70ba\u662f\u56e0\u70ba spark.sql.hive.metastore.version \u8a2d\u5b9a\u932f\u8aa4\u624d\u6703\u5c0e\u81f4\u7121\u6cd5\u521d\u59cb\u5316 SessionHiveMetaStoreClient\uff0c\u5c07\u7248\u672c\u63d0\u5347\u5230 3.1.0 \u4e4b\u5f8c\u9084\u662f\u7121\u6cd5\u6210\u529f\u5275\u5efa Database\uff0c\u95dc\u65bc\u5982\u4f55\u4f7f\u7528\u8f03\u65b0\u7684 Hive Metastore Version \u6703\u5728\u4e0b\u65b9\u9673\u8ff0\u3002<\/p>\n<h5>\u5229\u7528 Azure SQL Database \u7576\u6210 Databricks \u7684 Metastore (Hive Metastore 2.0 \u4ee5\u4e0a) <a href=\"https:\/\/www.thebestcsharpprogrammerintheworld.com\/2022\/01\/27\/external-apache-hive-metastore-azure-databricks-azure-sql\/\">\u9023\u7d50<\/a><\/h5>\n<p>\u5728 Azure \u5b98\u65b9\u7db2\u7ad9\u4e0a\u6211\u5011\u89c0\u5bdf\u5230 Databricks \u4e5f\u652f\u63f4 Hive Metastore 2.0 \u4ee5\u4e0a\u7684\u7248\u672c\uff0c\u5982\u4e0a\u5099\u8a3b\u6240\u8b1b\u6240\u4ee5\u5617\u8a66\u5be6\u4f5c\u5c07 Spark conf \u88e1\u9762\u7684 spark.sql.hive.metastore.version \u6539\u6210 2.3.7 \u6216\u662f 3.1.0 \u90fd\u6703\u9047\u5230\u4ee5\u4e0b\u7684\u6c92\u6709\u8fa6\u6cd5\u521d\u59cb\u5316 SessionHiveMetaStoreClient \u7684\u932f\u8aa4\u8a0a\u606f\uff1a<\/p>\n<pre class=\"lang:bash\">AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\nCaused by: HiveException: 
java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\nCaused by: RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\nCaused by: InvocationTargetException: \nCaused by: MetaException: Version information not found in metastore. \n&amp;lt;div class=&amp;quot;ansiout&amp;quot;&amp;gt;\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$2(HiveExternalCatalog.scala:160)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.maybeSynchronized(HiveExternalCatalog.scala:112)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$withClient$1(HiveExternalCatalog.scala:150)\n\tat com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:377)\n\tat com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:363)\n\tat com.databricks.spark.util.SparkDatabricksProgressReporter$.withStatusCode(ProgressReporter.scala:34)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:149)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:310)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:228)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:218)\n\tat org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:59)\n\tat org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$hiveCatalog$1(HiveSessionStateBuilder.scala:74)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.externalCatalog$lzycompute(SessionCatalog.scala:544)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.externalCatalog(SessionCatalog.scala:544)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalogImpl.listDatabases(SessionCatalog.scala:771)\n\tat 
com.databricks.sql.managedcatalog.ManagedCatalogSessionCatalog.listDatabasesWithCatalog(ManagedCatalogSessionCatalog.scala:601)\n\tat com.databricks.sql.managedcatalog.UnityCatalogV2Proxy.listNamespaces(UnityCatalogV2Proxy.scala:124)\n\tat org.apache.spark.sql.execution.datasources.v2.ShowNamespacesExec.run(ShowNamespacesExec.scala:42)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)<\/pre>\n<p style=\"text-align: justify;\">\u5617\u8a66\u5f88\u591a\u65b9\u6cd5\u4e4b\u5f8c\uff0c\u6211\u5011\u5728 <a href=\"https:\/\/www.thebestcsharpprogrammerintheworld.com\/2022\/01\/27\/external-apache-hive-metastore-azure-databricks-azure-sql\/\">External Apache Hive metastore, Azure Databricks, Azure SQL<\/a> \u9019\u7bc7\u6587\u7ae0\u88e1\u9762\u627e\u5230\u89e3\u7b54\uff0c\u53ea\u8981\u5c07 Spark Conf \u5167\u5169\u500b property \u62ff\u6389\uff0c\u53e6\u5916\u52a0\u4e0a datanucleus.schema.autoCreateTables true\uff0c\u5c31\u53ef\u4ee5\u6210\u529f\u5275\u5efa Database \u8207 Table\u3002<\/p>\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"420\" src=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/databricks_databases-1024x420.png\" alt=\"\" class=\"wp-image-8735\" srcset=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/databricks_databases-1024x420.png 1024w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/databricks_databases-300x123.png 300w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/databricks_databases-768x315.png 768w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/databricks_databases-1536x630.png 1536w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/08\/databricks_databases.png 1920w\" sizes=\"auto, 
(max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n<pre class=\"lang:bash\">spark.sql.hive.metastore.jars maven\nspark.sql.hive.metastore.version 1.2.1<\/pre>\n<p>\u5099\u8a3b\uff1a\u6b64\u6642 Hive Metastore 3.1.0 \u7684 Sql Database \u5167\u7684 dbo.VERSION \u88e1\u9762\u662f\u6c92\u6709 record \u7684\u3002<\/p>\n<h5>\u5229\u7528\u81ea\u5efa\u7684 MySQL Database \u7576\u6210 Hive Metastore <a href=\"https:\/\/www.guru99.com\/hive-metastore-configuration-mysql.html\">\u9023\u7d50<\/a><\/h5>\n<p>\u9019\u90e8\u5206\u6211\u5011\u6703\u53e6\u5916\u82b1\u4e00\u7bc7\u4f86\u8b1b\u89e3\u3002<\/p>\n<h2>Unity Catalog<\/h2>\n<p>\u76f4\u5230\u76ee\u524d\u70ba\u6b62\uff0c\u6211\u5011\u53ea\u80fd\u5920\u5275\u5efa Databases \u8207 Tables\uff0c\u4f46\u662f\u5982\u679c\u6211\u5011\u60f3\u8981\u7684\u67b6\u69cb\u662f three-level schema \u67b6\u69cb\u7684\u8a71\uff0c\u6211\u5011\u5c31\u6703\u9700\u8981 Unity Catalog\u3002<\/p>\n\n\n<figure class=\"wp-block-embed is-type-video is-provider-youtube wp-block-embed-youtube wp-embed-aspect-16-9 wp-has-aspect-ratio\"><div class=\"wp-block-embed__wrapper\">\n<iframe loading=\"lazy\" title=\"Advancing Spark - First Look at Unity Catalog\" width=\"910\" height=\"512\" src=\"https:\/\/www.youtube.com\/embed\/FCuuFGS3jFM?start=281&#038;feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\" allowfullscreen><\/iframe>\n<\/div><\/figure>\n\n\n<h5>\u95dc\u65bc\u6211\u5011\u5176\u4ed6 Hive \u7684\u6587\u7ae0\u53ef\u4ee5<a href=\"https:\/\/myoceane.fr\/?s=Hive\">\u53c3\u8003\u9023\u7d50<\/a><\/h5>","protected":false},"excerpt":{"rendered":"<p>\u5728\u958b\u767c Spark \u8207 Deltalake \u7684\u61c9\u7528\u7684\u6642\u5019\uff0c\u9700\u8981\u5efa\u7acb\u5f88\u591a\u7684 Table \u8207 Database \u7b49\u8cc7\u6e90\uff0c\u9019\u4e9b Table \u7684\u8cc7\u6e90\u7a76\u7adf\u662f\u600e\u9ebc\u7ba1\u7406\u7684\uff1f\u5c31\u662f Hive Metastore 
\u7684\u89d2\u8272\uff0c\u6211\u5011\u5728\u5f88\u81ea\u7136\u4f7f\u7528 Spark SQL \u7684\u6642\u5019\uff0c\u662f\u5426\u771f\u6b63\u4e86\u89e3\u80cc\u5f8c\u767c\u751f\u4e86\u4ec0\u9ebc\u4e8b\u60c5\uff1f\u672c\u7bc7\u6211\u5011\u7d00\u9304\u5982\u4f55\u5728 Databricks \u4e0a\u9762\u4f7f\u7528\u5ba2\u88fd\u5316\u7684 Hive Metastore\u3002<\/p>\n","protected":false},"author":1,"featured_media":1717,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[9,1491,14],"tags":[179,1573],"class_list":["post-8712","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-bigdata-ml","category-cloud","category-it-technology","tag-azure","tag-metastore"],"yoast_head":"<!-- This site is optimized with the Yoast SEO plugin v24.6 - https:\/\/yoast.com\/wordpress\/plugins\/seo\/ -->\n<title>[Hive] Metastore in Azure Databricks - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane<\/title>\n<meta name=\"robots\" content=\"index, follow, max-snippet:-1, max-image-preview:large, max-video-preview:-1\" \/>\n<link rel=\"canonical\" href=\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\" \/>\n<meta property=\"og:locale\" content=\"en_US\" \/>\n<meta property=\"og:type\" content=\"article\" \/>\n<meta property=\"og:title\" content=\"[Hive] Metastore in Azure Databricks - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane\" \/>\n<meta property=\"og:description\" content=\"\u5728\u958b\u767c Spark \u8207 Deltalake \u7684\u61c9\u7528\u7684\u6642\u5019\uff0c\u9700\u8981\u5efa\u7acb\u5f88\u591a\u7684 Table \u8207 Database \u7b49\u8cc7\u6e90\uff0c\u9019\u4e9b Table \u7684\u8cc7\u6e90\u7a76\u7adf\u662f\u600e\u9ebc\u7ba1\u7406\u7684\uff1f\u5c31\u662f Hive Metastore \u7684\u89d2\u8272\uff0c\u6211\u5011\u5728\u5f88\u81ea\u7136\u4f7f\u7528 Spark SQL 
\u7684\u6642\u5019\uff0c\u662f\u5426\u771f\u6b63\u4e86\u89e3\u80cc\u5f8c\u767c\u751f\u4e86\u4ec0\u9ebc\u4e8b\u60c5\uff1f\u672c\u7bc7\u6211\u5011\u7d00\u9304\u5982\u4f55\u5728 Databricks \u4e0a\u9762\u4f7f\u7528\u5ba2\u88fd\u5316\u7684 Hive Metastore\u3002\" \/>\n<meta property=\"og:url\" content=\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\" \/>\n<meta property=\"og:site_name\" content=\"\u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane\" \/>\n<meta property=\"article:published_time\" content=\"2022-08-23T01:55:34+00:00\" \/>\n<meta property=\"article:modified_time\" content=\"2022-09-23T06:45:49+00:00\" \/>\n<meta property=\"og:image\" content=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png\" \/>\n\t<meta property=\"og:image:width\" content=\"2490\" \/>\n\t<meta property=\"og:image:height\" content=\"442\" \/>\n\t<meta property=\"og:image:type\" content=\"image\/png\" \/>\n<meta name=\"author\" content=\"\u6ab8\u6aac\u7238\" \/>\n<meta name=\"twitter:card\" content=\"summary_large_image\" \/>\n<meta name=\"twitter:label1\" content=\"Written by\" \/>\n\t<meta name=\"twitter:data1\" content=\"\u6ab8\u6aac\u7238\" \/>\n\t<meta name=\"twitter:label2\" content=\"Est. 
reading time\" \/>\n\t<meta name=\"twitter:data2\" content=\"12 minutes\" \/>\n<script type=\"application\/ld+json\" class=\"yoast-schema-graph\">{\"@context\":\"https:\/\/schema.org\",\"@graph\":[{\"@type\":\"Article\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#article\",\"isPartOf\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\"},\"author\":{\"name\":\"\u6ab8\u6aac\u7238\",\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\"},\"headline\":\"[Hive] Metastore in Azure Databricks\",\"datePublished\":\"2022-08-23T01:55:34+00:00\",\"dateModified\":\"2022-09-23T06:45:49+00:00\",\"mainEntityOfPage\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\"},\"wordCount\":132,\"commentCount\":0,\"publisher\":{\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\"},\"image\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage\"},\"thumbnailUrl\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png\",\"keywords\":[\"Azure\",\"Metastore\"],\"articleSection\":[\"Big Data &amp; Machine Learning\",\"Cloud\",\"IT Technology\"],\"inLanguage\":\"en-US\",\"potentialAction\":[{\"@type\":\"CommentAction\",\"name\":\"Comment\",\"target\":[\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#respond\"]}]},{\"@type\":\"WebPage\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\",\"url\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\",\"name\":\"[Hive] Metastore in Azure Databricks - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a 
M-Y-Oceane\",\"isPartOf\":{\"@id\":\"https:\/\/myoceane.fr\/#website\"},\"primaryImageOfPage\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage\"},\"image\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage\"},\"thumbnailUrl\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png\",\"datePublished\":\"2022-08-23T01:55:34+00:00\",\"dateModified\":\"2022-09-23T06:45:49+00:00\",\"breadcrumb\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#breadcrumb\"},\"inLanguage\":\"en-US\",\"potentialAction\":[{\"@type\":\"ReadAction\",\"target\":[\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/\"]}]},{\"@type\":\"ImageObject\",\"inLanguage\":\"en-US\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage\",\"url\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png\",\"contentUrl\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png\",\"width\":2490,\"height\":442},{\"@type\":\"BreadcrumbList\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#breadcrumb\",\"itemListElement\":[{\"@type\":\"ListItem\",\"position\":1,\"name\":\"Home\",\"item\":\"https:\/\/myoceane.fr\/\"},{\"@type\":\"ListItem\",\"position\":2,\"name\":\"[Hive] Metastore in Azure Databricks\"}]},{\"@type\":\"WebSite\",\"@id\":\"https:\/\/myoceane.fr\/#website\",\"url\":\"https:\/\/myoceane.fr\/\",\"name\":\"M-Y-Oceane \u60f3\u65b9\u6d89\u6cd5\u3002\u91cf\u74f6\u5916\u7684\u5929\u7a7a\",\"description\":\"\u60f3\u65b9\u6d89\u6cd5, France, Taiwan, Health, Information 
Technology\",\"publisher\":{\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\"},\"potentialAction\":[{\"@type\":\"SearchAction\",\"target\":{\"@type\":\"EntryPoint\",\"urlTemplate\":\"https:\/\/myoceane.fr\/?s={search_term_string}\"},\"query-input\":{\"@type\":\"PropertyValueSpecification\",\"valueRequired\":true,\"valueName\":\"search_term_string\"}}],\"inLanguage\":\"en-US\"},{\"@type\":[\"Person\",\"Organization\"],\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\",\"name\":\"\u6ab8\u6aac\u7238\",\"image\":{\"@type\":\"ImageObject\",\"inLanguage\":\"en-US\",\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/image\/\",\"url\":\"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g\",\"contentUrl\":\"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g\",\"caption\":\"\u6ab8\u6aac\u7238\"},\"logo\":{\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/image\/\"},\"url\":\"https:\/\/myoceane.fr\/index.php\/author\/johnny5584767gmail-com\/\"}]}<\/script>\n<!-- \/ Yoast SEO plugin. 
-->","yoast_head_json":{"title":"[Hive] Metastore in Azure Databricks - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane","robots":{"index":"index","follow":"follow","max-snippet":"max-snippet:-1","max-image-preview":"max-image-preview:large","max-video-preview":"max-video-preview:-1"},"canonical":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/","og_locale":"en_US","og_type":"article","og_title":"[Hive] Metastore in Azure Databricks - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane","og_description":"\u5728\u958b\u767c Spark \u8207 Deltalake \u7684\u61c9\u7528\u7684\u6642\u5019\uff0c\u9700\u8981\u5efa\u7acb\u5f88\u591a\u7684 Table \u8207 Database \u7b49\u8cc7\u6e90\uff0c\u9019\u4e9b Table \u7684\u8cc7\u6e90\u7a76\u7adf\u662f\u600e\u9ebc\u7ba1\u7406\u7684\uff1f\u5c31\u662f Hive Metastore \u7684\u89d2\u8272\uff0c\u6211\u5011\u5728\u5f88\u81ea\u7136\u4f7f\u7528 Spark SQL \u7684\u6642\u5019\uff0c\u662f\u5426\u771f\u6b63\u4e86\u89e3\u80cc\u5f8c\u767c\u751f\u4e86\u4ec0\u9ebc\u4e8b\u60c5\uff1f\u672c\u7bc7\u6211\u5011\u7d00\u9304\u5982\u4f55\u5728 Databricks \u4e0a\u9762\u4f7f\u7528\u5ba2\u88fd\u5316\u7684 Hive Metastore\u3002","og_url":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/","og_site_name":"\u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane","article_published_time":"2022-08-23T01:55:34+00:00","article_modified_time":"2022-09-23T06:45:49+00:00","og_image":[{"width":2490,"height":442,"url":"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png","type":"image\/png"}],"author":"\u6ab8\u6aac\u7238","twitter_card":"summary_large_image","twitter_misc":{"Written by":"\u6ab8\u6aac\u7238","Est. 
reading time":"12 minutes"},"schema":{"@context":"https:\/\/schema.org","@graph":[{"@type":"Article","@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#article","isPartOf":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/"},"author":{"name":"\u6ab8\u6aac\u7238","@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b"},"headline":"[Hive] Metastore in Azure Databricks","datePublished":"2022-08-23T01:55:34+00:00","dateModified":"2022-09-23T06:45:49+00:00","mainEntityOfPage":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/"},"wordCount":132,"commentCount":0,"publisher":{"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b"},"image":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage"},"thumbnailUrl":"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png","keywords":["Azure","Metastore"],"articleSection":["Big Data &amp; Machine Learning","Cloud","IT Technology"],"inLanguage":"en-US","potentialAction":[{"@type":"CommentAction","name":"Comment","target":["https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#respond"]}]},{"@type":"WebPage","@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/","url":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/","name":"[Hive] Metastore in Azure Databricks - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a 
M-Y-Oceane","isPartOf":{"@id":"https:\/\/myoceane.fr\/#website"},"primaryImageOfPage":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage"},"image":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage"},"thumbnailUrl":"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png","datePublished":"2022-08-23T01:55:34+00:00","dateModified":"2022-09-23T06:45:49+00:00","breadcrumb":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#breadcrumb"},"inLanguage":"en-US","potentialAction":[{"@type":"ReadAction","target":["https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/"]}]},{"@type":"ImageObject","inLanguage":"en-US","@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#primaryimage","url":"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png","contentUrl":"https:\/\/myoceane.fr\/wp-content\/uploads\/2019\/07\/\u87a2\u5e55\u5feb\u7167-2019-09-04-\u4e0b\u53484.59.11.png","width":2490,"height":442},{"@type":"BreadcrumbList","@id":"https:\/\/myoceane.fr\/index.php\/hive-metastore-service-with-azure\/#breadcrumb","itemListElement":[{"@type":"ListItem","position":1,"name":"Home","item":"https:\/\/myoceane.fr\/"},{"@type":"ListItem","position":2,"name":"[Hive] Metastore in Azure Databricks"}]},{"@type":"WebSite","@id":"https:\/\/myoceane.fr\/#website","url":"https:\/\/myoceane.fr\/","name":"M-Y-Oceane \u60f3\u65b9\u6d89\u6cd5\u3002\u91cf\u74f6\u5916\u7684\u5929\u7a7a","description":"\u60f3\u65b9\u6d89\u6cd5, France, Taiwan, Health, Information 
Technology","publisher":{"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b"},"potentialAction":[{"@type":"SearchAction","target":{"@type":"EntryPoint","urlTemplate":"https:\/\/myoceane.fr\/?s={search_term_string}"},"query-input":{"@type":"PropertyValueSpecification","valueRequired":true,"valueName":"search_term_string"}}],"inLanguage":"en-US"},{"@type":["Person","Organization"],"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b","name":"\u6ab8\u6aac\u7238","image":{"@type":"ImageObject","inLanguage":"en-US","@id":"https:\/\/myoceane.fr\/#\/schema\/person\/image\/","url":"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g","contentUrl":"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g","caption":"\u6ab8\u6aac\u7238"},"logo":{"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/image\/"},"url":"https:\/\/myoceane.fr\/index.php\/author\/johnny5584767gmail-com\/"}]}},"amp_enabled":false,"_links":{"self":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts\/8712","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/comments?post=8712"}],"version-history":[{"count":21,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts\/8712\/revisions"}],"predecessor-version":[{"id":8755,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts\/8712\/revisions\/8755"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/media\/1717"}],"wp:attachment":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/media?parent=8712"}],"wp:term":[{"taxon
omy":"category","embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/categories?post=8712"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/tags?post=8712"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}