phoenix报错java.sql.SQLException: ERROR 102 (08001)

最近几天一直报如下错误，发生错误以前没有对 hbase 做过任何修改操作！错误日志内容如下：

2019-04-25 14:42:41,656 INFO [regionserver/hadoop3/10.163.144.103:16020-shortCompactions-1556154369994] util.QueryUtil: Creating connection with the jdbc url: jdbc:phoenix:hadoop1:2181,hadoop2:2181,hadoop3:2181:2181:/hbase; 2019-04-25 14:42:41,657 WARN [regionserver/hadoop3/10.163.144.103:16020-shortCompactions-1556154369994] coprocessor.UngroupedAggregateRegionObserver: Unable to permanently disable indexes being partially rebuild for PARKINFOLIST1 java.sql.SQLException: ERROR 102 (08001): Malformed connection url. :hadoop1:2181,hadoop2:2181,hadoop3:2181:2181:/hbase; at org.apache.phoenix.exception.SQLExceptionCode$Factory$1.newException(SQLExceptionCode.java:488) at org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:150) at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver$ConnectionInfo.getMalFormedUrlException(PhoenixEmbeddedDriver.java:208) at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver$ConnectionInfo.create(PhoenixEmbeddedDriver.java:267) at org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:232) at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.createConnection(PhoenixEmbeddedDriver.java:150) at org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:221) at java.sql.DriverManager.getConnection(DriverManager.java:664) at java.sql.DriverManager.getConnection(DriverManager.java:208) at org.apache.phoenix.util.QueryUtil.getConnection(QueryUtil.java:390) at org.apache.phoenix.util.QueryUtil.getConnectionOnServer(QueryUtil.java:369) at org.apache.phoenix.util.QueryUtil.getConnectionOnServer(QueryUtil.java:359) at org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.clearTsOnDisabledIndexes(UngroupedAggregateRegionObserver.java:962) at org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver$3.run(UngroupedAggregateRegionObserver.java:953) at org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver$3.run(UngroupedAggregateRegionObserver.java:949) at 
java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1746) at org.apache.hadoop.security.SecurityUtil.doAsUser(SecurityUtil.java:448) at org.apache.hadoop.security.SecurityUtil.doAsLoginUser(SecurityUtil.java:429) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.hbase.util.Methods.call(Methods.java:39) at org.apache.hadoop.hbase.security.User.runAsLoginUser(User.java:210) at org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.postCompact(UngroupedAggregateRegionObserver.java:949) at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$10.call(RegionCoprocessorHost.java:627) at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1711) at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1789) at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1745) at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postCompact(RegionCoprocessorHost.java:623) at org.apache.hadoop.hbase.regionserver.HStore.moveCompatedFilesIntoPlace(HStore.java:1390) at org.apache.hadoop.hbase.regionserver.HStore.compact(HStore.java:1348) at org.apache.hadoop.hbase.regionserver.HRegion.compact(HRegion.java:2064) at org.apache.hadoop.hbase.regionserver.CompactSplitThread$CompactionRunner.doCompaction(CompactSplitThread.java:534) at org.apache.hadoop.hbase.regionserver.CompactSplitThread$CompactionRunner.run(CompactSplitThread.java:575) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 2019-04-25 14:42:41,662 INFO [regionserver/hadoop3/10.163.144.103:16020-shortCompactions-1556154369994] regionserver.HStore: Completed compaction of 3 (all) file(s) in 0 of PARKINFOLIST1,,1555036637597.6e5cd8899db61931177d70bed6548b6e. into 7c852833ea004931b0d8b6455b09438a(size=81.7 M), total size for store is 81.7 M. This selection was in queue for 0sec, and took 2sec to execute. 2019-04-25 14:42:41,662 INFO [regionserver/hadoop3/10.163.144.103:16020-shortCompactions-1556154369994] regionserver.CompactSplitThread: Completed compaction: Request = regionName=PARKINFOLIST1,,1555036637597.6e5cd8899db61931177d70bed6548b6e., storeName=0, fileCount=3, fileSize=84.5 M (81.7 M, 1.4 M, 1.4 M), priority=7, time=185562167880817; duration=2sec 2019-04-25 14:46:03,793 INFO [LruBlockCacheStatsExecutor] hfile.LruBlockCache: totalSize=363.11 KB, freeSize=395.95 MB, max=396.30 MB, blockCount=8, accesses=177904, hits=147893, hitRatio=83.13%, , cachingAccesses=147903, cachingHits=135604, cachingHitsRatio=91.68%, evictions=2069, evicted=12291, evictedPerRun=5.940550804138184 2019-04-25 14:51:03,793 INFO [LruBlockCacheStatsExecutor] hfile.LruBlockCache: totalSize=363.11 KB, freeSize=395.95 MB, max=396.30 MB, blockCount=8, accesses=177911, hits=147900, hitRatio=83.13%, , cachingAccesses=147910, cachingHits=135611, cachingHitsRatio=91.68%, evictions=2099, evicted=12291, evictedPerRun=5.855645656585693 2019-04-25 14:56:03,793 INFO [LruBlockCacheStatsExecutor] hfile.LruBlockCache: totalSize=363.11 KB, freeSize=395.95 MB, max=396.30 MB, blockCount=8, accesses=177918, hits=147907, hitRatio=83.13%, , cachingAccesses=147917, cachingHits=135618, cachingHitsRatio=91.69%, evictions=2129, evicted=12291, evictedPerRun=5.773132801055908 2019-04-25 15:01:03,793 INFO [LruBlockCacheStatsExecutor] hfile.LruBlockCache: totalSize=363.11 KB, freeSize=395.95 MB, max=396.30 MB, 
blockCount=8, accesses=177925, hits=147914, hitRatio=83.13%, ,
 
已邀请:

anbbrr - 大数据工程师

赞同来自:

这些错误都是从 hbase 的 regionserver 日志中看到的！

要回复问题请先登录或注册


中国HBase技术社区微信公众号:
hbasegroup

欢迎加入HBase生态+Spark社区钉钉大群