IQL
Executing invalid SQL causes the engine to crash
When a SQL statement that Spark does not support (for example one containing OFFSET or TOP) or an otherwise invalid SQL statement is executed, the engine's actor disappears from /iql/valid_engine in ZooKeeper, and the Spark page on port 8088 then fails with an HTTP 500 error.
== SQL ==
SELECT DISTINCT(a.nsrsbh), a.nsrmc,a.scjydz,a.zcdz,a.zcdlxdh,a.scjydlxdh,a.sjgsdq from (SELECT top 3900 from sjjsq_db.dwa_djxx_nsrxx_1)
----------------------------------------------------------------------------------^^^
at org.apache.spark.sql.catalyst.parser.ParseException.withCommand(ParseDriver.scala:239)
at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parse(ParseDriver.scala:115)
at org.apache.spark.sql.execution.SparkSqlParser.parse(SparkSqlParser.scala:48)
at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parsePlan(ParseDriver.scala:69)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
at iql.engine.adaptor.SelectAdaptor.parse(SelectAdaptor.scala:26)
at iql.engine.IQLSQLExecListener.exitSql(IQLSQLExecListener.scala:53)
at iql.engine.antlr.IQLParser$SqlContext.exitRule(IQLParser.java:248)
at org.antlr.v4.runtime.tree.ParseTreeWalker.exitRule(ParseTreeWalker.java:47)
at org.antlr.v4.runtime.tree.ParseTreeWalker.walk(ParseTreeWalker.java:30)
at org.antlr.v4.runtime.tree.ParseTreeWalker.walk(ParseTreeWalker.java:28)
at iql.engine.ExeActor$.parseStr(ExeActor.scala:342)
at iql.engine.ExeActor$.parse(ExeActor.scala:362)
at iql.engine.ExeActor.parseSQL(ExeActor.scala:206)
at iql.engine.ExeActor$$anonfun$receive$1$$anonfun$applyOrElse$1.apply$mcV$sp(ExeActor.scala:101)
at iql.engine.ExeActor.actorWapper(ExeActor.scala:243)
at iql.engine.ExeActor$$anonfun$receive$1.applyOrElse(ExeActor.scala:67)
at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
at iql.engine.ExeActor.aroundReceive(ExeActor.scala:35)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:527)
at akka.actor.ActorCell.invoke(ActorCell.scala:496)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:257)
at akka.dispatch.Mailbox.run(Mailbox.scala:224)
at akka.dispatch.Mailbox.exec(Mailbox.scala:234)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
19/04/11 13:47:21 WARN main.IqlMain$:
SELECT DISTINCT(a.nsrsbh), a.nsrmc,a.scjydz,a.zcdz,a.zcdlxdh,a.scjydlxdh,a.sjgsdq from (SELECT top 3900 from sjjsq_db.dwa_djxx_nsrxx_1)
line 1:135 extraneous input '<EOF>' expecting {'load', 'LOAD', '.', 'where', 'WHERE', 'as', 'AS', 'save', 'SAVE', 'partitionBy', 'PARTITIONBY', 'coalesce', 'SELECT', ';', 'insert', 'INSERT', 'create', 'CREATE', 'drop', 'DROP', 'refresh', 'REFRESH', 'set', 'SET', 'train', 'TRAIN', 'register', 'REGISTER', 'show', 'SHOW', 'describe', 'DESCRIBE', 'import', 'IMPORT', 'include', 'INCLUDE', 'explain', 'EXPLAIN', 'overwrite', 'OVERWRITE', 'append', 'APPEND', 'errorIfExists', 'ignore', 'IGNORE', 'update', 'UPDATE', 'and', 'AND', '=', IDENTIFIER, BACKQUOTED_IDENTIFIER, SIMPLE_COMMENT, BRACKETED_EMPTY_COMMENT, BRACKETED_COMMENT, '/**/', UNRECOGNIZED, DIGIT}
org.apache.spark.sql.catalyst.parser.ParseException: mismatched input 'from' expecting <EOF>(line 1, pos 82)
== SQL ==
SELECT DISTINCT(a.nsrsbh), a.nsrmc,a.scjydz,a.zcdz,a.zcdlxdh,a.scjydlxdh,a.sjgsdq from (SELECT top 3900 from sjjsq_db.dwa_djxx_nsrxx_1)
----------------------------------------------------------------------------------^^^
at org.apache.spark.sql.catalyst.parser.ParseException.withCommand(ParseDriver.scala:239)
at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parse(ParseDriver.scala:115)
at org.apache.spark.sql.execution.SparkSqlParser.parse(SparkSqlParser.scala:48)
at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parsePlan(ParseDriver.scala:69)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
at iql.engine.adaptor.SelectAdaptor.parse(SelectAdaptor.scala:26)
at iql.engine.IQLSQLExecListener.exitSql(IQLSQLExecListener.scala:53)
at iql.engine.antlr.IQLParser$SqlContext.exitRule(IQLParser.java:248)
at org.antlr.v4.runtime.tree.ParseTreeWalker.exitRule(ParseTreeWalker.java:47)
at org.antlr.v4.runtime.tree.ParseTreeWalker.walk(ParseTreeWalker.java:30)
at org.antlr.v4.runtime.tree.ParseTreeWalker.walk(ParseTreeWalker.java:28)
at iql.engine.ExeActor$.parseStr(ExeActor.scala:342)
at iql.engine.ExeActor$.parse(ExeActor.scala:362)
at iql.engine.ExeActor.parseSQL(ExeActor.scala:206)
at iql.engine.ExeActor$$anonfun$receive$1$$anonfun$applyOrElse$1.apply$mcV$sp(ExeActor.scala:101)
at iql.engine.ExeActor.actorWapper(ExeActor.scala:243)
at iql.engine.ExeActor$$anonfun$receive$1.applyOrElse(ExeActor.scala:67)
at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
at iql.engine.ExeActor.aroundReceive(ExeActor.scala:35)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:527)
at akka.actor.ActorCell.invoke(ActorCell.scala:496)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:257)
at akka.dispatch.Mailbox.run(Mailbox.scala:224)
at akka.dispatch.Mailbox.exec(Mailbox.scala:234)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
19/04/11 13:47:22 WARN engine.ExeActor: Actor Stop ...
19/04/11 13:47:22 WARN engine.ExeActor: Actor Start ...[/iql/valid_engine/192.168.201.83:2550_actor1]
19/04/11 13:47:22 WARN cluster.YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
19/04/11 13:47:22 ERROR spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalStateException: Promise already completed.
at scala.concurrent.Promise$class.complete(Promise.scala:55)
at scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:153)
at scala.concurrent.Promise$class.success(Promise.scala:86)
at scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:153)
at org.apache.spark.deploy.yarn.ApplicationMaster.org$apache$spark$deploy$yarn$ApplicationMaster$$sparkContextInitialized(ApplicationMaster.scala:423)
at org.apache.spark.deploy.yarn.ApplicationMaster$.sparkContextInitialized(ApplicationMaster.scala:843)
at org.apache.spark.scheduler.cluster.YarnClusterScheduler.postStartHook(YarnClusterScheduler.scala:32)
at org.apache.spark.SparkContext.
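For context, the ParseException at the top of this log is thrown by spark.sql() while parsing, before any job is submitted, and by itself it is recoverable: catching it leaves the SparkSession usable for the next statement. The sketch below is only a minimal illustration against the public Spark API, not the actual IQL ExeActor code:

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.catalyst.parser.ParseException

// Minimal sketch: a malformed statement only raises ParseException from
// spark.sql(); handling it here keeps the session (and the engine actor
// that owns it) alive for the next query.
def runStatement(spark: SparkSession, sql: String): Either[String, DataFrame] =
  try Right(spark.sql(sql))
  catch {
    case e: ParseException => Left(s"SQL parse error: ${e.getMessage}")
  }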
@Rache555 I ran your statement on my side and got the same error, but the engine did not go down. Does it go down every time for you?
@Rache555 Also, why did you use a number as the column alias in your subquery? Or do you actually want to fetch 3900 rows? If so, you can write it like this:
SELECT * from sjjsq_db.dwa_djxx_nsrxx_1 limit 3900 as tb_tmp;
SELECT DISTINCT(a.nsrsbh), a.nsrmc,a.scjydz,a.zcdz,a.zcdlxdh,a.scjydlxdh,a.sjgsdq from tb_tmp;
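For reference, the Spark version in this trace (2.x) supports neither a TOP keyword nor an OFFSET clause, but it does support LIMIT, so the same result can also be written as one standard statement with LIMIT in an aliased subquery. A minimal sketch against a plain SparkSession (table name taken from this thread; note that LIMIT without ORDER BY returns an arbitrary 3900 rows):

import org.apache.spark.sql.SparkSession

// Sketch only: enableHiveSupport() is assumed because the table lives in a
// Hive database (sjjsq_db); adjust to your deployment.
val spark = SparkSession.builder()
  .appName("limit-instead-of-top")
  .enableHiveSupport()
  .getOrCreate()

// LIMIT inside an aliased subquery replaces the unsupported "SELECT top 3900".
val df = spark.sql(
  """SELECT DISTINCT a.nsrsbh, a.nsrmc, a.scjydz, a.zcdz, a.zcdlxdh, a.scjydlxdh, a.sjgsdq
    |FROM (SELECT * FROM sjjsq_db.dwa_djxx_nsrxx_1 LIMIT 3900) a""".stripMargin)

df.show(10)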
Thanks for the reply. After the error occurs 8 or 9 times, one actor disappears from the /iql/valid_engine directory in ZooKeeper, and then the Spark page on port 8088 reports the following error:
HTTP ERROR 500
Problem accessing /proxy/application_1554186834163_0789/. Reason:
Connection to http://centos4:9707 refused
Caused by: org.apache.http.conn.HttpHostConnectException: Connection to http://centos4:9707 refused
at org.apache.http.impl.conn.DefaultClientConnectionOperator.openConnection(DefaultClientConnectionOperator.java:190)
at org.apache.http.impl.conn.ManagedClientConnectionImpl.open(ManagedClientConnectionImpl.java:294)
at org.apache.http.impl.client.DefaultRequestDirector.tryConnect(DefaultRequestDirector.java:643)
at org.apache.http.impl.client.DefaultRequestDirector.execute(DefaultRequestDirector.java:479)
at org.apache.http.impl.client.AbstractHttpClient.execute(AbstractHttpClient.java:906)
at org.apache.http.impl.client.AbstractHttpClient.execute(AbstractHttpClient.java:805)
at org.apache.http.impl.client.AbstractHttpClient.execute(AbstractHttpClient.java:784)
at org.apache.hadoop.yarn.server.webproxy.WebAppProxyServlet.proxyLink(WebAppProxyServlet.java:203)
at org.apache.hadoop.yarn.server.webproxy.WebAppProxyServlet.doGet(WebAppProxyServlet.java:393)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:707)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
at org.mortbay.jetty.servlet.ServletHolder.handle(ServletHolder.java:511)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1221)
at com.google.inject.servlet.FilterChainInvocation.doFilter(FilterChainInvocation.java:66)
at com.sun.jersey.spi.container.servlet.ServletContainer.doFilter(ServletContainer.java:900)
at com.sun.jersey.spi.container.servlet.ServletContainer.doFilter(ServletContainer.java:834)
at org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebAppFilter.doFilter(RMWebAppFilter.java:148)
at com.sun.jersey.spi.container.servlet.ServletContainer.doFilter(ServletContainer.java:795)
at com.google.inject.servlet.FilterDefinition.doFilter(FilterDefinition.java:163)
at com.google.inject.servlet.FilterChainInvocation.doFilter(FilterChainInvocation.java:58)
at com.google.inject.servlet.ManagedFilterPipeline.dispatch(ManagedFilterPipeline.java:118)
at com.google.inject.servlet.GuiceFilter.doFilter(GuiceFilter.java:113)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter.doFilter(StaticUserWebFilter.java:109)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.apache.hadoop.security.authentication.server.AuthenticationFilter.doFilter(AuthenticationFilter.java:622)
at org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter.doFilter(DelegationTokenAuthenticationFilter.java:301)
at org.apache.hadoop.security.authentication.server.AuthenticationFilter.doFilter(AuthenticationFilter.java:574)
at org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilter.doFilter(RMAuthenticationFilter.java:84)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.apache.hadoop.http.HttpServer2$QuotingInputFilter.doFilter(HttpServer2.java:1296)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.apache.hadoop.http.NoCacheFilter.doFilter(NoCacheFilter.java:45)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.apache.hadoop.http.NoCacheFilter.doFilter(NoCacheFilter.java:45)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.mortbay.jetty.servlet.ServletHandler.handle(ServletHandler.java:399)
at org.mortbay.jetty.security.SecurityHandler.handle(SecurityHandler.java:216)
at org.mortbay.jetty.servlet.SessionHandler.handle(SessionHandler.java:182)
at org.mortbay.jetty.handler.ContextHandler.handle(ContextHandler.java:767)
at org.mortbay.jetty.webapp.WebAppContext.handle(WebAppContext.java:450)
at org.mortbay.jetty.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:230)
at org.mortbay.jetty.handler.HandlerWrapper.handle(HandlerWrapper.java:152)
at org.mortbay.jetty.Server.handle(Server.java:326)
at org.mortbay.jetty.HttpConnection.handleRequest(HttpConnection.java:542)
at org.mortbay.jetty.HttpConnection$RequestHandler.headerComplete(HttpConnection.java:928)
at org.mortbay.jetty.HttpParser.parseNext(HttpParser.java:549)
at org.mortbay.jetty.HttpParser.parseAvailable(HttpParser.java:212)
at org.mortbay.jetty.HttpConnection.handle(HttpConnection.java:404)
at org.mortbay.io.nio.SelectChannelEndPoint.run(SelectChannelEndPoint.java:410)
at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
Caused by: java.net.ConnectException: Connection refused (Connection refused)
at java.net.PlainSocketImpl.socketConnect(Native Method)
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
at java.net.Socket.connect(Socket.java:589)
at org.apache.http.conn.scheme.PlainSocketFactory.connectSocket(PlainSocketFactory.java:127)
at org.apache.http.impl.conn.DefaultClientConnectionOperator.openConnection(DefaultClientConnectionOperator.java:180)
... 50 more
Caused by:
That SQL was not executed by me, but this situation keeps coming up while SQL is being executed: after an invalid SQL statement has been run several times, 192.168.222.83:2550_actor6 is lost. Any pointers would be appreciated.
Also, the error shown on the IQL page is: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext. This stopped SparkContext was created at:
org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:925)
iql.engine.main.IqlMain$.createSpark(IqlMain.scala:54)
iql.engine.repl.AbstractSparkInterpreter.sparkCreateContext(AbstractSparkInterpreter.scala:35)
iql.engine.repl.SparkInterpreter$$anonfun$start$1.apply$mcV$sp(SparkInterpreter.scala:60)
iql.engine.repl.SparkInterpreter$$anonfun$start$1.apply(SparkInterpreter.scala:58)
iql.engine.repl.SparkInterpreter$$anonfun$start$1.apply(SparkInterpreter.scala:58)
iql.engine.repl.AbstractSparkInterpreter.restoreContextClassLoader(AbstractSparkInterpreter.scala:230)
iql.engine.repl.SparkInterpreter.start(SparkInterpreter.scala:58)
iql.engine.main.IqlMain$.main(IqlMain.scala:61)
iql.engine.main.IqlMain.main(IqlMain.scala)
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.lang.reflect.Method.invoke(Method.java:498)
org.apache.spark.deploy.yarn.ApplicationMaster$$anon$4.run(ApplicationMaster.scala:721)
The currently active SparkContext was created at:
(No active SparkContext.)
at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:99)
at org.apache.spark.SparkContext.defaultParallelism(SparkContext.scala:2332)
at org.apache.spark.sql.execution.LocalTableScanExec.numParallelism$lzycompute(LocalTableScanExec.scala:46)
at org.apache.spark.sql.execution.LocalTableScanExec.numParallelism(LocalTableScanExec.scala:45)
at org.apache.spark.sql.execution.LocalTableScanExec.rdd$lzycompute(LocalTableScanExec.scala:48)
at org.apache.spark.sql.execution.LocalTableScanExec.rdd(LocalTableScanExec.scala:48)
at org.apache.spark.sql.execution.LocalTableScanExec.doExecute(LocalTableScanExec.scala:52)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at org.apache.spark.sql.execution.DeserializeToObjectExec.doExecute(objects.scala:89)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
at org.apache.spark.sql.Dataset.rdd$lzycompute(Dataset.scala:2980)
at org.apache.spark.sql.Dataset.rdd(Dataset.scala:2978)
at iql.engine.IQLSQLExecListener.refreshTableAndView(IQLSQLExecListener.scala:89)
at iql.engine.ExeActor$$anonfun$receive$1$$anonfun$applyOrElse$1.apply$mcV$sp(ExeActor.scala:102)
at iql.engine.ExeActor.actorWapper(ExeActor.scala:243)
at iql.engine.ExeActor$$anonfun$receive$1.applyOrElse(ExeActor.scala:67)
at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
at iql.engine.ExeActor.aroundReceive(ExeActor.scala:35)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:527)
at akka.actor.ActorCell.invoke(ActorCell.scala:496)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:257)
at akka.dispatch.Mailbox.run(Mailbox.scala:224)
at akka.dispatch.Mailbox.exec(Mailbox.scala:234)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
The actors in ZooKeeper:
[zk: 192.168.222.83:2181(CONNECTED) 7] ls /iql/valid_engine
[192.168.222.85:2550_actor8, 192.168.222.85:2550_actor7, 192.168.222.85:2550_actor2, 192.168.222.85:2550_actor1, 192.168.222.85:2550_actor4, 192.168.222.85:2550_actor3, 192.168.222.85:2550_actor6, 192.168.222.85:2550_actor5]
[zk: 192.168.222.83:2181(CONNECTED) 8] ls /iql/valid_engine
[192.168.222.85:2550_actor8, 192.168.222.85:2550_actor7, 192.168.222.85:2550_actor1, 192.168.222.85:2550_actor4, 192.168.222.85:2550_actor3, 192.168.222.85:2550_actor6, 192.168.222.85:2550_actor5]
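The "Promise already completed" error and the "Actor Stop ... / Actor Start ..." lines earlier in the log suggest the engine tried to bring up a second SparkContext inside the same YARN ApplicationMaster after the failure, which would leave the application with a stopped SparkContext and, once the JVM goes down, a missing ephemeral node under /iql/valid_engine plus the HTTP 500 from the YARN proxy. As a hedged sketch (not existing IQL code), an engine could at least detect the dead context before accepting more work:

import org.apache.spark.sql.SparkSession

// Hypothetical guard, not part of IQL: if the underlying SparkContext has
// been stopped, refuse new SQL and let the process shut down cleanly so its
// /iql/valid_engine registration can be removed or re-created.
def contextAlive(spark: SparkSession): Boolean =
  !spark.sparkContext.isStopped

// Usage sketch inside a query handler:
// if (!contextAlive(spark)) {
//   // deregister this engine from ZooKeeper and exit
// }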
@Rache555 Add me on WeChat: sobigufo