{"id":8757,"date":"2022-09-23T09:41:51","date_gmt":"2022-09-23T07:41:51","guid":{"rendered":"https:\/\/myoceane.fr\/?p=8757"},"modified":"2023-04-12T04:28:44","modified_gmt":"2023-04-12T02:28:44","slug":"hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore","status":"publish","type":"post","link":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/","title":{"rendered":"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore"},"content":{"rendered":"<div id=\"fb-root\"><\/div>\n<p style=\"text-align: justify;\">\u64c1\u6709\u4e00\u500b\u81ea\u5df1\u7684 Hive Metastore \u7684\u597d\u8655\u662f\u65b9\u4fbf\u7ba1\u7406\u81ea\u5df1\u7684\u8cc7\u6599\uff0c\u5229\u7528 Hive Metastore \u53ef\u4ee5\u628a\u8cc7\u6599\u8868\u8207\u5927\u6578\u64da\u5e73\u53f0\u4e0a\u9762\u7684\u8cc7\u6599\u95dc\u9023\u8d77\u4f86\u3002Hive Metastore \u53ef\u4ee5\u90e8\u7f72\u5728\u4e0d\u540c\u7684\u8cc7\u6599\u5eab\u4e0a\u9762\uff0c\u4f8b\u5982 MySQL \u6216\u662f Microsoft SQL Database\u3002<\/p>\n<p>\u53c3\u8003 <a href=\"https:\/\/downloads.apache.org\/hive\/\">Apache Hive \u7a0b\u5f0f\u4e0b\u8f09\u9023\u7d50<\/a><\/p>\n<h4>\u521d\u59cb\u5316 Hive Metastore<\/h4>\n<p style=\"text-align: justify;\">\u53ef\u4ee5\u4f7f\u7528 Apache Hive \u5c08\u6848\u4e2d\u7684\u5de5\u5177 schematool\uff0c\u9996\u5148\u53ef\u4ee5\u5f9e <a href=\"https:\/\/dlcdn.apache.org\/hive\/\">https:\/\/dlcdn.apache.org\/hive\/<\/a> \u4e0b\u8f09\u9069\u5408\u7684&nbsp; Apache Hive \u7248\u672c\uff0c\u4f8b\u5982\u4ee5 Apache Hive 3.1.3&nbsp; \u70ba\u4f8b\uff0c\u57f7\u884c\u4ee5\u4e0b\u7684\u6307\u4ee4\uff1a<\/p>\n<pre class=\"lang:bash\">wget https:\/\/dlcdn.apache.org\/hive\/hive-3.1.3\/apache-hive-3.1.3-bin.tar.gz\ntar -xvzf apache-hive-3.1.3-bin.tar.gz\ncd apache-hive-3.1.3-bin\/conf\ncp hive-default.xml.template hive-site.xml<\/pre>\n<p style=\"text-align: justify;\">\u53c3\u8003\u53e6\u5916\u4e00\u7bc7\u8a2d\u5b9a 
<a href=\"https:\/\/dwbi.org\/pages\/183\">Hive \u5230 MySQL \u4e0a<\/a> \u5728\u8907\u88fd hive-site.xml \u4e4b\u5f8c\u53d6\u4ee3\u6389\u4e0b\u9762\u5217\u51fa\u4f86\u7684\u53c3\u6578\u4e4b\u5f8c\uff0c\u57f7\u884c\u6700\u5f8c\u4e00\u884c\u6307\u4ee4\u3002<\/p>\n<pre class=\"lang:bash\">&lt;name&gt;javax.jdo.option.ConnectionURL&lt;\/name&gt;\n&lt;value&gt;jdbc:mysql:\/\/metastoreserver.mysql.database.azure.com:3306\/metastoredb&lt;\/value&gt;\n\n&lt;name&gt;javax.jdo.option.ConnectionDriverName&lt;\/name&gt;\n&lt;value&gt;com.mysql.cj.jdbc.Driver&lt;\/value&gt;\n\n&lt;name&gt;javax.jdo.option.ConnectionUserName&lt;\/name&gt;\n&lt;value&gt;admin&lt;\/value&gt;\n\n&lt;name&gt;javax.jdo.option.ConnectionPassword&lt;\/name&gt;\n&lt;value&gt;password&lt;\/value&gt;<\/pre>\n<p>\u6700\u5f8c\u5728\u57f7\u884c<\/p>\n<pre class=\"lang:bash\">schematool -dbType mysql -initSchema<\/pre>\n<h6>\u5099\u8a3b\uff1a\u5728\u57f7\u884c\u6700\u5f8c\u9019\u884c\u7a0b\u5f0f\u7684\u6642\u5019\u767c\u751f\u4e00\u9023\u4e32\u7684\u554f\u984c<\/h6>\n<h6>guava-19.0.jar \u7248\u672c\u885d\u7a81<\/h6>\n<p>\u5728 \/apache-hive-3.1.3-bin\/lib \u88e1\u9762\u6709\u4e00\u500b\u9810\u8a2d\u7684 guava-19.0.jar \u9019\u500b\u53ef\u80fd\u6703\u8ddf\u65e2\u6709\u7684 guava \u7248\u672c\u885d\u7a81\u51fa\u73fe\u4e0b\u65b9\u7684\u932f\u8aa4\u8a0a\u606f\uff0c\u79fb\u9664\u5c31\u53ef\u4ee5\u89e3\u6c7a\u9019\u500b\u5831\u932f\u3002<\/p>\n<pre class=\"lang:bash\">root@c4916eacdf8342e0b5ba9cf10a601a11000000:\/tmp\/apache-hive-3.1.3-bin\/bin@ .\/schematool -dbType mysql -userName atgenomix@metastoretest -initSchema\nWARNING: HADOOP_PREFIX has been replaced by HADOOP_HOME. 
Using value of HADOOP_PREFIX.\nSLF4J: Class path contains multiple SLF4J bindings.\nSLF4J: Found binding in [jar:file:\/tmp\/apache-hive-3.1.3-bin\/lib\/log4j-slf4j-impl-2.17.1.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]\nSLF4J: Found binding in [jar:file:\/usr\/local\/hadoop-3.3.0\/share\/hadoop\/common\/lib\/slf4j-log4j12-1.7.25.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]\nSLF4J: See http:\/\/www.slf4j.org\/codes.html@multiple_bindings for an explanation.\nSLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]\nException in thread \"main\" java.lang.NoSuchMethodError: com.google.common.base.Preconditions.checkArgument(ZLjava\/lang\/String;Ljava\/lang\/Object;)V\n\tat org.apache.hadoop.conf.Configuration.set(Configuration.java:1380)\n\tat org.apache.hadoop.conf.Configuration.set(Configuration.java:1361)\n\tat org.apache.hadoop.mapred.JobConf.setJar(JobConf.java:536)\n\tat org.apache.hadoop.mapred.JobConf.setJarByClass(JobConf.java:554)\n\tat org.apache.hadoop.mapred.JobConf.&lt;init&gt;(JobConf.java:448)\n\tat org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:5144)\n\tat org.apache.hadoop.hive.conf.HiveConf.&lt;init&gt;(HiveConf.java:5107)\n\tat org.apache.hive.beeline.HiveSchemaTool.&lt;init&gt;(HiveSchemaTool.java:96)\n\tat org.apache.hive.beeline.HiveSchemaTool.main(HiveSchemaTool.java:1473)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.hadoop.util.RunJar.run(RunJar.java:323)\n\tat org.apache.hadoop.util.RunJar.main(RunJar.java:236)<\/pre>\n<h4>ClassNotFoundException: com.mysql.cj.jdbc.Driver<\/h4>\n<p>\u8981\u89e3\u6c7a\u9019\u500b\u554f\u984c\u53ef\u4ee5\u5229\u7528\u4ee5\u4e0b\u7684\u6307\u4ee4\u4e0b\u8f09\u5230 
apache-hive-4.0.0-alpha-1-bin\/lib<\/p>\n<pre class=\"lang:bash\">wget https:\/\/repo1.maven.org\/maven2\/mysql\/mysql-connector-java\/8.0.30\/mysql-connector-java-8.0.30.jar<\/pre>\n<pre class=\"lang:bash\">root@c4916eacdf8342e0b5ba9cf10a601a11000000:\/tmp\/apache-hive-4.0.0-alpha-1-bin\/bin@ .\/schematool -dbType mysql -initSchema\nWARNING: HADOOP_PREFIX has been replaced by HADOOP_HOME. Using value of HADOOP_PREFIX.\nSLF4J: Class path contains multiple SLF4J bindings.\nSLF4J: Found binding in [jar:file:\/tmp\/apache-hive-4.0.0-alpha-1-bin\/lib\/log4j-slf4j-impl-2.17.1.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]\nSLF4J: Found binding in [jar:file:\/usr\/local\/hadoop-3.3.0\/share\/hadoop\/common\/lib\/slf4j-log4j12-1.7.25.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]\nSLF4J: See http:\/\/www.slf4j.org\/codes.html@multiple_bindings for an explanation.\nSLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]\nInitializing the schema to: 4.0.0-alpha-1\nMetastore connection URL:\t jdbc:mysql:\/\/metastoreserver.mysql.database.azure.com:3306\/metastoredb\nMetastore connection Driver :\t com.mysql.cj.jdbc.Driver\nMetastore connection User:\t admin\nFailed to load driver\nUnderlying cause: java.lang.ClassNotFoundException : com.mysql.cj.jdbc.Driver\nUse --verbose for detailed stacktrace.\n*** schemaTool failed ***<\/pre>\n<h6>\u6210\u529f\u521d\u59cb\u5316 database \u7684\u8a0a\u606f<\/h6>\n<pre class=\"lang:bash\">root@c4916eacdf8342e0b5ba9cf10a601a11000000:\/tmp\/apache-hive-4.0.0-alpha-1-bin\/bin@ .\/schematool -dbType mysql -userName atgenomix@metastoretest -initSchema\nWARNING: HADOOP_PREFIX has been replaced by HADOOP_HOME. 
Using value of HADOOP_PREFIX.\nSLF4J: Class path contains multiple SLF4J bindings.\nSLF4J: Found binding in [jar:file:\/tmp\/apache-hive-4.0.0-alpha-1-bin\/lib\/log4j-slf4j-impl-2.17.1.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]\nSLF4J: Found binding in [jar:file:\/usr\/local\/hadoop-3.3.0\/share\/hadoop\/common\/lib\/slf4j-log4j12-1.7.25.jar!\/org\/slf4j\/impl\/StaticLoggerBinder.class]\nSLF4J: See http:\/\/www.slf4j.org\/codes.html@multiple_bindings for an explanation.\nSLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]\nInitializing the schema to: 4.0.0-alpha-1\nMetastore connection URL:\t jdbc:mysql:\/\/metastoreserver.mysql.database.azure.com:3306\/metastoredb\nMetastore connection Driver :\t com.mysql.cj.jdbc.Driver\nMetastore connection User:\t admin@metastoreserver\nStarting metastore schema initialization to 4.0.0-alpha-1\nInitialization script hive-schema-4.0.0-alpha-1.mysql.sql\n\nInitialization script completed<\/pre>\n<p>\u4ee5\u4e0b\u7232 MySQL \u8cc7\u6599\u5eab\u622a\u5716\uff0c\u4f7f\u7528 Hive Schema Version 4.0.0 \u5c31\u6709\u591a\u4e00\u500b catalogs \u7684\u5c64\u7d1a\uff01<\/p>\n\n\n<figure class=\"wp-block-gallery has-nested-images columns-1 is-cropped wp-block-gallery-1 is-layout-flex wp-block-gallery-is-layout-flex\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"2880\" height=\"1498\" data-id=\"8784\" src=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08.png\" alt=\"\" class=\"wp-image-8784\" srcset=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08.png 2880w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08-300x156.png 300w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08-1024x533.png 1024w, 
https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08-768x399.png 768w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08-1536x799.png 1536w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.43.08-2048x1065.png 2048w\" sizes=\"auto, (max-width: 2880px) 100vw, 2880px\" \/><figcaption>Hive Schema Version 4.0.0-alpha-1<\/figcaption><\/figure>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"2880\" height=\"1550\" data-id=\"8783\" src=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44.png\" alt=\"\" class=\"wp-image-8783\" srcset=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44.png 2880w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44-300x161.png 300w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44-1024x551.png 1024w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44-768x413.png 768w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44-1536x827.png 1536w, https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/\u622a\u5716-2022-09-23-\u4e0b\u53484.40.44-2048x1102.png 2048w\" sizes=\"auto, (max-width: 2880px) 100vw, 2880px\" \/><figcaption>Hive Schema Version 2.3.0<\/figcaption><\/figure>\n<\/figure>\n\n\n<h4>\u5f9e Spark 3.3.0 \u9023\u7d50 Hive Metastore 2.3.9 \u6642 Spark Configurations \u7684\u8a2d\u5b9a<\/h4>\n<p>\u76ee\u524d\u518d\u4f7f\u7528 Spark SQL \u7684\u6642\u5019\uff0c\u73fe\u5728\u6709\u6bd4\u8f03\u591a\u7684\u61c9\u7528\u90fd\u662f\u4f7f\u7528 Deltalake&nbsp;<\/p>\n<pre class=\"lang:bash\">spark.sql.extensions io.delta.sql.DeltaSparkSessionExtension\nspark.sql.catalog.spark_catalog 
org.apache.spark.sql.delta.catalog.DeltaCatalog\nspark.jars.packages io.delta:delta-core_2.12:2.1.0<\/pre>\n<p style=\"text-align: justify;\">\u672c\u7bc4\u4f8b\u662f\u7528 Spark 3.3.0\uff0c\u5728 spark-hive_2.12-3.3.0.jar \u88e1\u9762\u7684\u662f metastore.version \u662f 2.3.9\uff0c\u6240\u4ee5\u5982\u679c Hive Metastore \u7528\u7684\u662f 2.3.0 \u7248\u672c\u7684\u8a71\u5c31\u53ef\u4ee5\u76f4\u63a5\u9023\u7d50\u3002<\/p>\n<pre class=\"lang:bash\">spark.sql.hive.metastore.version 2.3.9\nspark.hadoop.datanucleus.autoCreateSchema true\nspark.hadoop.datanucleus.fixedDatastore false\nspark.hadoop.datanucleus.schema.autoCreateTables true\nspark.databricks.delta.schema.autoMerge.enabled true<\/pre>\n<p>\u5982\u679c\u8981\u5229\u7528 Spark \u53bb\u9023\u7d50 Hive Metastore \u5c31\u9700\u8981\u4ee5\u4e0b\u7684\u8a2d\u5b9a\uff1a<\/p>\n<pre class=\"lang:bash\">spark.hadoop.javax.jdo.option.ConnectionUserName admin@metastoreserver\nspark.hadoop.javax.jdo.option.ConnectionURL jdbc:mysql:\/\/metastoreserver.mysql.database.azure.com:3306\/metastore?createDatabaseIfNotExist=true&amp;serverTimezone=UTC\nspark.hadoop.javax.jdo.option.ConnectionPassword password\nspark.hadoop.javax.jdo.option.ConnectionDriverName com.mysql.cj.jdbc.Driver\nspark.sql.warehouse.dir abfss:\/\/container@storageaccount.dfs.core.windows.net\/user\/hive\/warehouse\/<\/pre>\n\n\n<p>\u66f4\u9032\u4e00\u6b65\u91dd\u5c0d\u9019\u4e9b Spark Configuration \u7684\u8a2d\u5b9a\u7d30\u7bc0\u53ef\u4ee5\u53c3\u8003 <a href=\"https:\/\/spark.apache.org\/docs\/latest\/sql-data-sources-hive-tables.html\">https:\/\/spark.apache.org\/docs\/latest\/sql-data-sources-hive-tables.html<\/a><\/p>\n\n\n<h4>\u5f9e Spark 3.3.0 \u9023\u7d50 Hive Metastore 3.1.3 \u6642\u7684\u8a2d\u5b9a<\/h4>\n<p>\u5982\u679c\u662f\u60f3\u8981\u5f9e Spark 3.3.0 \u53bb\u9023\u7d50 Hive Metastore 2.3.0 
\u4ee5\u4e0a\u7684\u7248\u672c\u7684\u8a71\uff0c\u5247\u9700\u8981\u76f8\u5c0d\u591a\u7684\u8a2d\u5b9a\uff0c\u4ee5\u4e0b\u662f\u984d\u5916\u7684\u5176\u4ed6\u689d\u4ef6\u5217\u8868\uff1a<\/p>\n<ol>\n<li>\u5b89\u88dd apache-hive-3.1.3-bin.tar.gz<\/li>\n<li>\u8a2d\u5b9a\u5728 apache-hive-3.1.3 \u88e1\u9762\u7684 hive-site.xml\u00a0<\/li>\n<li>\u5728 spark-defaults.conf \u8a2d\u5b9a\u4ee5\u4e0b\u7684\u53c3\u6578<\/li>\n<li>\u5728 \/opt\/hive\/lib \u653e\u7f6e mysql-connector-java-8.0.30.jar<\/li>\n<li><a href=\"https:\/\/stackoverflow.com\/questions\/67632430\/java-lang-noclassdeffounderror-org-apache-hadoop-hive-ql-metadata-hiveexception\">\u5728 \/opt\/hive\/lib \u653e\u7f6e commons-collections-3.2.2.jar<\/a><\/li>\n<\/ol>\n<pre class=\"lang:bash\">spark.sql.hive.metastore.version   3.1.2\nspark.sql.hive.metastore.jars\t   path\nspark.sql.hive.metastore.jars.path file:\/\/\/opt\/hive\/lib\/*.jar<\/pre>\n<p>\u6e2c\u8a66\u90e8\u5c6c\u6642\u51fa\u73fe\u4ee5\u4e0b\u932f\u8aa4\uff0c\u900f\u904e\u4e0a\u65b9\u7b2c 5 \u9ede\u53ef\u4ee5\u89e3\u6c7a\uff1a<\/p>\n<pre class=\"lang:bash\">2023-04-12T01:29:05,306 ERROR [main] org.apache.hadoop.hive.metastore.RetryingHMSHandler - java.lang.NoClassDefFoundError: org\/apache\/commons\/collections\/CollectionUtils\n\tat org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:5745)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:97)\n\tat com.sun.proxy.$Proxy39.grantPrivileges(Unknown Source)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:830)\n\tat 
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:796)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:541)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:80)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:93)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:8678)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&lt;init&gt;(HiveMetaStoreClient.java:169)\n\tat org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&lt;init&gt;(SessionHiveMetaStoreClient.java:94)\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)\n\tat sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\n\tat java.lang.reflect.Constructor.newInstance(Constructor.java:423)\n\tat org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&lt;init&gt;(RetryingMetaStoreClient.java:95)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119)\n\tat 
org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4306)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4374)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4354)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1662)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1651)\n\tat org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394)\n\tat scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223)\n\tat scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:223)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)\n\tat org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:170)\n\tat org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:168)\n\tat org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:70)\n\tat 
org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:122)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:122)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1031)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1017)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1009)\n\tat org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.listTables(V2SessionCatalog.scala:57)\n\tat org.apache.spark.sql.connector.catalog.DelegatingCatalogExtension.listTables(DelegatingCatalogExtension.java:61)\n\tat org.apache.spark.sql.execution.datasources.v2.ShowTablesExec.run(ShowTablesExec.scala:40)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)\n\tat org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)\n\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)\n\tat org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)\n\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n\tat org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)\n\tat org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)\n\tat 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:584)\n\tat org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:176)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:584)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:560)\n\tat org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)\n\tat org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)\n\tat org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)\n\tat org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:220)\n\tat org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n\tat org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)\n\tat org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n\tat org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)\n\tat 
$line14.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:23)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:27)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:29)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:31)\n\tat $line14.$read$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:33)\n\tat $line14.$read$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:35)\n\tat $line14.$read$$iw$$iw.&lt;init&gt;(&lt;console&gt;:37)\n\tat $line14.$read$$iw.&lt;init&gt;(&lt;console&gt;:39)\n\tat $line14.$read.&lt;init&gt;(&lt;console&gt;:41)\n\tat $line14.$read$.&lt;init&gt;(&lt;console&gt;:45)\n\tat $line14.$read$.&lt;clinit&gt;(&lt;console&gt;)\n\tat $line14.$eval$.$print$lzycompute(&lt;console&gt;:7)\n\tat $line14.$eval$.$print(&lt;console&gt;:6)\n\tat $line14.$eval.$print(&lt;console&gt;)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:747)\n\tat scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1020)\n\tat scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:568)\n\tat scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)\n\tat scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)\n\tat scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)\n\tat scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:567)\n\tat scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:594)\n\tat scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:564)\n\tat scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:865)\n\tat 
scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:733)\n\tat scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:435)\n\tat scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:456)\n\tat org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:239)\n\tat org.apache.spark.repl.Main$.doMain(Main.scala:78)\n\tat org.apache.spark.repl.Main$.main(Main.scala:58)\n\tat org.apache.spark.repl.Main.main(Main.scala)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)\n\tat org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958)\n\tat org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)\n\tat org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)\n\tat org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)\n\tat org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)\n\tat org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)\n\tat org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)\nCaused by: java.lang.ClassNotFoundException: org.apache.commons.collections.CollectionUtils\n\tat java.net.URLClassLoader.findClass(URLClassLoader.java:387)\n\tat java.lang.ClassLoader.loadClass(ClassLoader.java:418)\n\tat org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.doLoadClass(IsolatedClientLoader.scala:269)\n\tat org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.loadClass(IsolatedClientLoader.scala:258)\n\tat java.lang.ClassLoader.loadClass(ClassLoader.java:351)\n\t... 
136 more\n\n2023-04-12T01:29:05,307 ERROR [main] org.apache.hadoop.hive.metastore.RetryingHMSHandler - HMSHandler Fatal error: java.lang.NoClassDefFoundError: org\/apache\/commons\/collections\/CollectionUtils\n\tat org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:5745)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:97)\n\tat com.sun.proxy.$Proxy39.grantPrivileges(Unknown Source)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:830)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:796)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:541)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:80)\n\tat org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:93)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:8678)\n\tat org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&lt;init&gt;(HiveMetaStoreClient.java:169)\n\tat 
org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&lt;init&gt;(SessionHiveMetaStoreClient.java:94)\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n\tat sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)\n\tat sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\n\tat java.lang.reflect.Constructor.newInstance(Constructor.java:423)\n\tat org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&lt;init&gt;(RetryingMetaStoreClient.java:95)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148)\n\tat org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4306)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4374)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4354)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1662)\n\tat org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1651)\n\tat org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394)\n\tat scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274)\n\tat org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394)\n\tat 
org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223)\n\tat scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101)\n\tat org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:223)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)\n\tat org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)\n\tat org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:170)\n\tat org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:168)\n\tat org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:70)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:122)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:122)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1031)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1017)\n\tat org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1009)\n\tat org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.listTables(V2SessionCatalog.scala:57)\n\tat org.apache.spark.sql.connector.catalog.DelegatingCatalogExtension.listTables(DelegatingCatalogExtension.java:61)\n\tat org.apache.spark.sql.execution.datasources.v2.ShowTablesExec.run(ShowTablesExec.scala:40)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)\n\tat org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)\n\tat 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)\n\tat org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)\n\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)\n\tat org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)\n\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n\tat org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)\n\tat org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)\n\tat org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:584)\n\tat org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:176)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:584)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)\n\tat org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)\n\tat org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)\n\tat org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:560)\n\tat 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)\n\tat org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)\n\tat org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)\n\tat org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:220)\n\tat org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n\tat org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)\n\tat org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)\n\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n\tat org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:23)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:27)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:29)\n\tat $line14.$read$$iw$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:31)\n\tat $line14.$read$$iw$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:33)\n\tat $line14.$read$$iw$$iw$$iw.&lt;init&gt;(&lt;console&gt;:35)\n\tat $line14.$read$$iw$$iw.&lt;init&gt;(&lt;console&gt;:37)\n\tat $line14.$read$$iw.&lt;init&gt;(&lt;console&gt;:39)\n\tat $line14.$read.&lt;init&gt;(&lt;console&gt;:41)\n\tat $line14.$read$.&lt;init&gt;(&lt;console&gt;:45)\n\tat $line14.$read$.&lt;clinit&gt;(&lt;console&gt;)\n\tat $line14.$eval$.$print$lzycompute(&lt;console&gt;:7)\n\tat $line14.$eval$.$print(&lt;console&gt;:6)\n\tat $line14.$eval.$print(&lt;console&gt;)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat 
scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:747)\n\tat scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1020)\n\tat scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:568)\n\tat scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)\n\tat scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)\n\tat scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)\n\tat scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:567)\n\tat scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:594)\n\tat scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:564)\n\tat scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:865)\n\tat scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:733)\n\tat scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:435)\n\tat scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:456)\n\tat org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:239)\n\tat org.apache.spark.repl.Main$.doMain(Main.scala:78)\n\tat org.apache.spark.repl.Main$.main(Main.scala:58)\n\tat org.apache.spark.repl.Main.main(Main.scala)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n\tat java.lang.reflect.Method.invoke(Method.java:498)\n\tat org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)\n\tat org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958)\n\tat org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)\n\tat org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)\n\tat org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)\n\tat 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)\n\tat org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)\n\tat org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)\nCaused by: java.lang.ClassNotFoundException: org.apache.commons.collections.CollectionUtils\n\tat java.net.URLClassLoader.findClass(URLClassLoader.java:387)\n\tat java.lang.ClassLoader.loadClass(ClassLoader.java:418)\n\tat org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.doLoadClass(IsolatedClientLoader.scala:269)\n\tat org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.loadClass(IsolatedClientLoader.scala:258)\n\tat java.lang.ClassLoader.loadClass(ClassLoader.java:351)\n\t... 136 more\n\norg.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\n  at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:110)\n  at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:223)\n  at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)\n  at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)\n  at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:170)\n  at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:168)\n  at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:70)\n  at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:122)\n  at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:122)\n  at org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1031)\n  at 
org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1017)\n  at org.apache.spark.sql.catalyst.catalog.SessionCatalog.listTables(SessionCatalog.scala:1009)\n  at org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.listTables(V2SessionCatalog.scala:57)\n  at org.apache.spark.sql.connector.catalog.DelegatingCatalogExtension.listTables(DelegatingCatalogExtension.java:61)\n  at org.apache.spark.sql.execution.datasources.v2.ShowTablesExec.run(ShowTablesExec.scala:40)\n  at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)\n  at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)\n  at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)\n  at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)\n  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)\n  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)\n  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)\n  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)\n  at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)\n  at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)\n  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:584)\n  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:176)\n  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:584)\n  at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)\n  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)\n  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)\n  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)\n  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)\n  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:560)\n  at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)\n  at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)\n  at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)\n  at org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:220)\n  at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)\n  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n  at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)\n  at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)\n  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)\n  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)\n  ... 
47 elided\nCaused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\n  at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1666)\n  at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1651)\n  at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609)\n  at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394)\n  at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n  at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294)\n  at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225)\n  at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224)\n  at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274)\n  at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394)\n  at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223)\n  at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)\n  at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101)\n  ... 
91 more\nCaused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient\n  at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:86)\n  at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&lt;init&gt;(RetryingMetaStoreClient.java:95)\n  at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148)\n  at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119)\n  at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4306)\n  at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4374)\n  at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4354)\n  at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1662)\n  ... 103 more\nCaused by: java.lang.reflect.InvocationTargetException: org.apache.hadoop.hive.metastore.api.MetaException: org\/apache\/commons\/collections\/CollectionUtils\n  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)\n  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\n  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)\n  at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84)\n  ... 
110 more\nCaused by: org.apache.hadoop.hive.metastore.api.MetaException: org\/apache\/commons\/collections\/CollectionUtils\n  at org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:84)\n  at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:93)\n  at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:8678)\n  at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&lt;init&gt;(HiveMetaStoreClient.java:169)\n  at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&lt;init&gt;(SessionHiveMetaStoreClient.java:94)\n  ... 115 more\nCaused by: java.lang.NoClassDefFoundError: org\/apache\/commons\/collections\/CollectionUtils\n  at org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:5745)\n  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n  at java.lang.reflect.Method.invoke(Method.java:498)\n  at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:97)\n  at com.sun.proxy.$Proxy39.grantPrivileges(Unknown Source)\n  at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:830)\n  at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:796)\n  at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:541)\n  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n  at java.lang.reflect.Method.invoke(Method.java:498)\n  at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)\n  at 
org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)\n  at org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:80)\n  ... 119 more\nCaused by: java.lang.ClassNotFoundException: org.apache.commons.collections.CollectionUtils\n  at java.net.URLClassLoader.findClass(URLClassLoader.java:387)\n  at java.lang.ClassLoader.loadClass(ClassLoader.java:418)\n  at org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.doLoadClass(IsolatedClientLoader.scala:269)\n  at org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.loadClass(IsolatedClientLoader.scala:258)\n  at java.lang.ClassLoader.loadClass(ClassLoader.java:351)\n  ... 136 more<\/pre>\n<p>\u6210\u529f\u5f9e Spark 3.3.0 \u5b58\u53d6 Hive Metastore 3.1.3\u00a0<\/p>\n<pre class=\"lang:bash\">scala&gt; spark.sql(\"SHOW TABLES\").show()\n2023-04-12T01:34:57,414 INFO [main] org.apache.hadoop.hive.conf.HiveConf - Found configuration file file:\/opt\/hive\/conf\/hive-site.xml\nHive Session ID = 95f985d7-ad2b-4379-9853-c23e867bbb5e\n2023-04-12T01:34:57,611 INFO [main] SessionState - Hive Session ID = 95f985d7-ad2b-4379-9853-c23e867bbb5e\n2023-04-12T01:34:57,880 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore\n2023-04-12T01:34:57,905 WARN [main] org.apache.hadoop.hive.metastore.ObjectStore - datanucleus.autoStartMechanismMode is set to unsupported value null . 
Setting it to value: ignored\n2023-04-12T01:34:57,905 INFO [main] org.apache.hadoop.hive.metastore.ObjectStore - ObjectStore, initialize called\n2023-04-12T01:34:57,905 INFO [main] org.apache.hadoop.hive.metastore.conf.MetastoreConf - Unable to find config file hive-site.xml\n2023-04-12T01:34:57,905 INFO [main] org.apache.hadoop.hive.metastore.conf.MetastoreConf - Found configuration file null\n2023-04-12T01:34:57,905 INFO [main] org.apache.hadoop.hive.metastore.conf.MetastoreConf - Unable to find config file hivemetastore-site.xml\n2023-04-12T01:34:57,906 INFO [main] org.apache.hadoop.hive.metastore.conf.MetastoreConf - Found configuration file null\n2023-04-12T01:34:57,906 INFO [main] org.apache.hadoop.hive.metastore.conf.MetastoreConf - Unable to find config file metastore-site.xml\n2023-04-12T01:34:57,906 INFO [main] org.apache.hadoop.hive.metastore.conf.MetastoreConf - Found configuration file null\n2023-04-12T01:34:58,015 INFO [main] DataNucleus.Persistence - Property datanucleus.cache.level2 unknown - will be ignored\n2023-04-12T01:34:58,131 INFO [main] com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...\n2023-04-12T01:34:58,419 INFO [main] com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.\n2023-04-12T01:34:58,441 INFO [main] com.zaxxer.hikari.HikariDataSource - HikariPool-2 - Starting...\n2023-04-12T01:34:58,506 INFO [main] com.zaxxer.hikari.HikariDataSource - HikariPool-2 - Start completed.\n2023-04-12T01:34:58,564 INFO [main] org.apache.hadoop.hive.metastore.ObjectStore - Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order\"\n2023-04-12T01:34:58,676 INFO [main] org.apache.hadoop.hive.metastore.MetaStoreDirectSql - Using direct SQL, underlying DB is MYSQL\n2023-04-12T01:34:58,677 INFO [main] org.apache.hadoop.hive.metastore.ObjectStore - Initialized ObjectStore\n2023-04-12T01:34:58,817 WARN [main] DataNucleus.MetaData - 
Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:58,818 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:58,818 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:58,819 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:58,819 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:58,819 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:59,093 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:59,093 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:59,094 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:59,094 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:59,094 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. Ignored\n2023-04-12T01:34:59,094 WARN [main] DataNucleus.MetaData - Metadata has jdbc-type of null yet this is not valid. 
Ignored\n2023-04-12T01:34:59,741 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - Added admin role in metastore\n2023-04-12T01:34:59,749 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - Added public role in metastore\n2023-04-12T01:34:59,823 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - No user is added in admin role, since config is empty\n2023-04-12T01:34:59,929 INFO [main] org.apache.hadoop.hive.metastore.RetryingMetaStoreClient - RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=root (auth:SIMPLE) retries=1 delay=1 lifetime=0\n2023-04-12T01:34:59,950 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_database: @hive#default\n2023-04-12T01:34:59,951 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi=root\tip=unknown-ip-addr\tcmd=get_database: @hive#default\t\n2023-04-12T01:34:59,964 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_database: @hive#global_temp\n2023-04-12T01:34:59,964 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi=root\tip=unknown-ip-addr\tcmd=get_database: @hive#global_temp\t\n2023-04-12T01:34:59,973 WARN [main] org.apache.hadoop.hive.metastore.ObjectStore - Failed to get database hive.global_temp, returning NoSuchObjectException\n2023-04-12T01:34:59,975 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_database: @hive#default\n2023-04-12T01:34:59,975 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi=root\tip=unknown-ip-addr\tcmd=get_database: @hive#default\t\n2023-04-12T01:34:59,984 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_database: @hive#default\n2023-04-12T01:34:59,984 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi=root\tip=unknown-ip-addr\tcmd=get_database: @hive#default\t\n2023-04-12T01:34:59,993 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_tables: 
db=@hive#default pat=*\n2023-04-12T01:34:59,993 INFO [main] org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi=root\tip=unknown-ip-addr\tcmd=get_tables: db=@hive#default pat=*\t\n+---------+---------+-----------+                                               \n|namespace|tableName|isTemporary|\n+---------+---------+-----------+\n|  default| employee|      false|\n+---------+---------+-----------+<\/pre>\n<p>\u00a0<\/p>","protected":false},"excerpt":{"rendered":"<p>\u64c1\u6709\u4e00\u500b\u81ea\u5df1\u7684 Hive Metastore \u7684\u597d\u8655\u662f\u65b9\u4fbf\u7ba1\u7406\u81ea\u5df1\u7684\u8cc7\u6599\uff0c\u5229\u7528 Hive Metastore \u53ef\u4ee5\u628a\u8cc7\u6599\u8868\u8207\u5927\u6578\u64da\u5e73\u53f0\u4e0a\u9762\u7684\u8cc7\u6599\u95dc\u9023\u8d77\u4f86\u3002Hive Metastore \u53ef\u4ee5\u90e8\u7f72\u5728\u4e0d\u540c\u7684\u8cc7\u6599\u5eab\u4e0a\u9762\uff0c\u4f8b\u5982 MySQL \u6216\u662f Microsoft SQL Database\u3002<\/p>\n","protected":false},"author":1,"featured_media":8950,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[9,14],"tags":[1574],"class_list":["post-8757","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-bigdata-ml","category-it-technology","tag-hive-metastore"],"yoast_head":"<!-- This site is optimized with the Yoast SEO plugin v24.6 - https:\/\/yoast.com\/wordpress\/plugins\/seo\/ -->\n<title>[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane<\/title>\n<meta name=\"robots\" content=\"index, follow, max-snippet:-1, max-image-preview:large, max-video-preview:-1\" \/>\n<link rel=\"canonical\" href=\"https:\/\/myoceane.fr\/index.php\/hive-\u5728-spark-\u5b58\u53d6\u81ea\u5df1\u7684-hive-metastore\/\" \/>\n<meta property=\"og:locale\" content=\"en_US\" \/>\n<meta property=\"og:type\" content=\"article\" \/>\n<meta 
property=\"og:title\" content=\"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane\" \/>\n<meta property=\"og:description\" content=\"\u64c1\u6709\u4e00\u500b\u81ea\u5df1\u7684 Hive Metastore \u7684\u597d\u8655\u662f\u65b9\u4fbf\u7ba1\u7406\u81ea\u5df1\u7684\u8cc7\u6599\uff0c\u5229\u7528 Hive Metastore \u53ef\u4ee5\u628a\u8cc7\u6599\u8868\u8207\u5927\u6578\u64da\u5e73\u53f0\u4e0a\u9762\u7684\u8cc7\u6599\u95dc\u9023\u8d77\u4f86\u3002Hive Metastore \u53ef\u4ee5\u90e8\u7f72\u5728\u4e0d\u540c\u7684\u8cc7\u6599\u5eab\u4e0a\u9762\uff0c\u4f8b\u5982 MySQL \u6216\u662f Microsoft SQL Database\u3002\" \/>\n<meta property=\"og:url\" content=\"https:\/\/myoceane.fr\/index.php\/hive-\u5728-spark-\u5b58\u53d6\u81ea\u5df1\u7684-hive-metastore\/\" \/>\n<meta property=\"og:site_name\" content=\"\u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane\" \/>\n<meta property=\"article:published_time\" content=\"2022-09-23T07:41:51+00:00\" \/>\n<meta property=\"article:modified_time\" content=\"2023-04-12T02:28:44+00:00\" \/>\n<meta property=\"og:image\" content=\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg\" \/>\n\t<meta property=\"og:image:width\" content=\"2560\" \/>\n\t<meta property=\"og:image:height\" content=\"1509\" \/>\n\t<meta property=\"og:image:type\" content=\"image\/jpeg\" \/>\n<meta name=\"author\" content=\"\u6ab8\u6aac\u7238\" \/>\n<meta name=\"twitter:card\" content=\"summary_large_image\" \/>\n<meta name=\"twitter:label1\" content=\"Written by\" \/>\n\t<meta name=\"twitter:data1\" content=\"\u6ab8\u6aac\u7238\" \/>\n\t<meta name=\"twitter:label2\" content=\"Est. 
reading time\" \/>\n\t<meta name=\"twitter:data2\" content=\"30 minutes\" \/>\n<script type=\"application\/ld+json\" class=\"yoast-schema-graph\">{\"@context\":\"https:\/\/schema.org\",\"@graph\":[{\"@type\":\"Article\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#article\",\"isPartOf\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/\"},\"author\":{\"name\":\"\u6ab8\u6aac\u7238\",\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\"},\"headline\":\"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore\",\"datePublished\":\"2022-09-23T07:41:51+00:00\",\"dateModified\":\"2023-04-12T02:28:44+00:00\",\"mainEntityOfPage\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/\"},\"wordCount\":125,\"commentCount\":2,\"publisher\":{\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\"},\"image\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage\"},\"thumbnailUrl\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg\",\"keywords\":[\"Hive Metastore\"],\"articleSection\":[\"Big Data &amp; Machine Learning\",\"IT 
Technology\"],\"inLanguage\":\"en-US\",\"potentialAction\":[{\"@type\":\"CommentAction\",\"name\":\"Comment\",\"target\":[\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#respond\"]}]},{\"@type\":\"WebPage\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/\",\"url\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/\",\"name\":\"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane\",\"isPartOf\":{\"@id\":\"https:\/\/myoceane.fr\/#website\"},\"primaryImageOfPage\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage\"},\"image\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage\"},\"thumbnailUrl\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg\",\"datePublished\":\"2022-09-23T07:41:51+00:00\",\"dateModified\":\"2023-04-12T02:28:44+00:00\",\"breadcrumb\":{\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#breadcrumb\"},\"inLanguage\":\"en-US\",\"potentialAction\":[{\"@type\":\"ReadAction\",\"target\":[\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/\"]}]},{\"@type\":\"ImageObject\",\"inLanguage\":\"en-US\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage\",\"url\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg\",\"contentUrl\":\"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/H
iveMetastore-scaled.jpeg\",\"width\":2560,\"height\":1509},{\"@type\":\"BreadcrumbList\",\"@id\":\"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#breadcrumb\",\"itemListElement\":[{\"@type\":\"ListItem\",\"position\":1,\"name\":\"Home\",\"item\":\"https:\/\/myoceane.fr\/\"},{\"@type\":\"ListItem\",\"position\":2,\"name\":\"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore\"}]},{\"@type\":\"WebSite\",\"@id\":\"https:\/\/myoceane.fr\/#website\",\"url\":\"https:\/\/myoceane.fr\/\",\"name\":\"M-Y-Oceane \u60f3\u65b9\u6d89\u6cd5\u3002\u91cf\u74f6\u5916\u7684\u5929\u7a7a\",\"description\":\"\u60f3\u65b9\u6d89\u6cd5, France, Taiwan, Health, Information Technology\",\"publisher\":{\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\"},\"potentialAction\":[{\"@type\":\"SearchAction\",\"target\":{\"@type\":\"EntryPoint\",\"urlTemplate\":\"https:\/\/myoceane.fr\/?s={search_term_string}\"},\"query-input\":{\"@type\":\"PropertyValueSpecification\",\"valueRequired\":true,\"valueName\":\"search_term_string\"}}],\"inLanguage\":\"en-US\"},{\"@type\":[\"Person\",\"Organization\"],\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b\",\"name\":\"\u6ab8\u6aac\u7238\",\"image\":{\"@type\":\"ImageObject\",\"inLanguage\":\"en-US\",\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/image\/\",\"url\":\"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g\",\"contentUrl\":\"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g\",\"caption\":\"\u6ab8\u6aac\u7238\"},\"logo\":{\"@id\":\"https:\/\/myoceane.fr\/#\/schema\/person\/image\/\"},\"url\":\"https:\/\/myoceane.fr\/index.php\/author\/johnny5584767gmail-com\/\"}]}<\/script>\n<!-- \/ Yoast SEO plugin. 
-->","yoast_head_json":{"title":"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane","robots":{"index":"index","follow":"follow","max-snippet":"max-snippet:-1","max-image-preview":"max-image-preview:large","max-video-preview":"max-video-preview:-1"},"canonical":"https:\/\/myoceane.fr\/index.php\/hive-\u5728-spark-\u5b58\u53d6\u81ea\u5df1\u7684-hive-metastore\/","og_locale":"en_US","og_type":"article","og_title":"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane","og_description":"\u64c1\u6709\u4e00\u500b\u81ea\u5df1\u7684 Hive Metastore \u7684\u597d\u8655\u662f\u65b9\u4fbf\u7ba1\u7406\u81ea\u5df1\u7684\u8cc7\u6599\uff0c\u5229\u7528 Hive Metastore \u53ef\u4ee5\u628a\u8cc7\u6599\u8868\u8207\u5927\u6578\u64da\u5e73\u53f0\u4e0a\u9762\u7684\u8cc7\u6599\u95dc\u9023\u8d77\u4f86\u3002Hive Metastore \u53ef\u4ee5\u90e8\u7f72\u5728\u4e0d\u540c\u7684\u8cc7\u6599\u5eab\u4e0a\u9762\uff0c\u4f8b\u5982 MySQL \u6216\u662f Microsoft SQL Database\u3002","og_url":"https:\/\/myoceane.fr\/index.php\/hive-\u5728-spark-\u5b58\u53d6\u81ea\u5df1\u7684-hive-metastore\/","og_site_name":"\u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a M-Y-Oceane","article_published_time":"2022-09-23T07:41:51+00:00","article_modified_time":"2023-04-12T02:28:44+00:00","og_image":[{"width":2560,"height":1509,"url":"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg","type":"image\/jpeg"}],"author":"\u6ab8\u6aac\u7238","twitter_card":"summary_large_image","twitter_misc":{"Written by":"\u6ab8\u6aac\u7238","Est. 
reading time":"30 minutes"},"schema":{"@context":"https:\/\/schema.org","@graph":[{"@type":"Article","@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#article","isPartOf":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/"},"author":{"name":"\u6ab8\u6aac\u7238","@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b"},"headline":"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore","datePublished":"2022-09-23T07:41:51+00:00","dateModified":"2023-04-12T02:28:44+00:00","mainEntityOfPage":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/"},"wordCount":125,"commentCount":2,"publisher":{"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b"},"image":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage"},"thumbnailUrl":"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg","keywords":["Hive Metastore"],"articleSection":["Big Data &amp; Machine Learning","IT Technology"],"inLanguage":"en-US","potentialAction":[{"@type":"CommentAction","name":"Comment","target":["https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#respond"]}]},{"@type":"WebPage","@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/","url":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/","name":"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore - \u60f3\u65b9\u6d89\u6cd5 - \u91cf\u74f6\u5916\u7684\u5929\u7a7a 
M-Y-Oceane","isPartOf":{"@id":"https:\/\/myoceane.fr\/#website"},"primaryImageOfPage":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage"},"image":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage"},"thumbnailUrl":"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg","datePublished":"2022-09-23T07:41:51+00:00","dateModified":"2023-04-12T02:28:44+00:00","breadcrumb":{"@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#breadcrumb"},"inLanguage":"en-US","potentialAction":[{"@type":"ReadAction","target":["https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/"]}]},{"@type":"ImageObject","inLanguage":"en-US","@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#primaryimage","url":"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg","contentUrl":"https:\/\/myoceane.fr\/wp-content\/uploads\/2022\/09\/HiveMetastore-scaled.jpeg","width":2560,"height":1509},{"@type":"BreadcrumbList","@id":"https:\/\/myoceane.fr\/index.php\/hive-%e5%9c%a8-spark-%e5%ad%98%e5%8f%96%e8%87%aa%e5%b7%b1%e7%9a%84-hive-metastore\/#breadcrumb","itemListElement":[{"@type":"ListItem","position":1,"name":"Home","item":"https:\/\/myoceane.fr\/"},{"@type":"ListItem","position":2,"name":"[Hive] \u5728 Spark \u5b58\u53d6\u81ea\u5df1\u7684 Hive Metastore"}]},{"@type":"WebSite","@id":"https:\/\/myoceane.fr\/#website","url":"https:\/\/myoceane.fr\/","name":"M-Y-Oceane \u60f3\u65b9\u6d89\u6cd5\u3002\u91cf\u74f6\u5916\u7684\u5929\u7a7a","description":"\u60f3\u65b9\u6d89\u6cd5, France, Taiwan, Health, Information 
Technology","publisher":{"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b"},"potentialAction":[{"@type":"SearchAction","target":{"@type":"EntryPoint","urlTemplate":"https:\/\/myoceane.fr\/?s={search_term_string}"},"query-input":{"@type":"PropertyValueSpecification","valueRequired":true,"valueName":"search_term_string"}}],"inLanguage":"en-US"},{"@type":["Person","Organization"],"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/4a4552fb8c27693083d465e12db7658b","name":"\u6ab8\u6aac\u7238","image":{"@type":"ImageObject","inLanguage":"en-US","@id":"https:\/\/myoceane.fr\/#\/schema\/person\/image\/","url":"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g","contentUrl":"https:\/\/secure.gravatar.com\/avatar\/6cc678684664f8ad45a8d56a6630b183?s=96&d=mm&r=g","caption":"\u6ab8\u6aac\u7238"},"logo":{"@id":"https:\/\/myoceane.fr\/#\/schema\/person\/image\/"},"url":"https:\/\/myoceane.fr\/index.php\/author\/johnny5584767gmail-com\/"}]}},"amp_enabled":false,"_links":{"self":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts\/8757","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/comments?post=8757"}],"version-history":[{"count":48,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts\/8757\/revisions"}],"predecessor-version":[{"id":8989,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/posts\/8757\/revisions\/8989"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/media\/8950"}],"wp:attachment":[{"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/media?parent=8757"}],"wp:term":[{"taxon
omy":"category","embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/categories?post=8757"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/myoceane.fr\/index.php\/wp-json\/wp\/v2\/tags?post=8757"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}