"nacos-grpc-client-executor--593" #15259 daemon prio=5 os_prio=0 tid=0x00007f0c807bc000 nid=0xa1d waiting on condition [0x00007f0c13067000]
java.lang.Thread.State: TIMED_WAITING (parking)
at sun.misc.Unsafe.park(Native Method)
- parking to wait for <0x00000006c4aa0170> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078)
at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
"nacos-grpc-client-executor--592" #15204 daemon prio=5 os_prio=0 tid=0x00007f0c810ae800 nid=0x9d3 waiting on condition [0x00007f0c15b92000]
java.lang.Thread.State: TIMED_WAITING (parking)
at sun.misc.Unsafe.park(Native Method)
- parking to wait for <0x00000006c4aa0170> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078)
at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
"nacos-grpc-client-executor--591" #15203 daemon prio=5 os_prio=0 tid=0x00007f0c80d6d800 nid=0x9d2 waiting on condition [0x00007f0c1558c000]
java.lang.Thread.State: TIMED_WAITING (parking)
at sun.misc.Unsafe.park(Native Method)
- parking to wait for <0x00000006c4aa0170> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078)
at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
"nacos-grpc-client-executor--527" #15044 daemon prio=5 os_prio=0 tid=0x00007f0c4028c000 nid=0x930 waiting on condition [0x00007f0c1366d000]
java.lang.Thread.State: TIMED_WAITING (parking)
at sun.misc.Unsafe.park(Native Method)
- parking to wait for <0x00000006c1229c98> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078)
at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
信息太少了,不好判断,你有多少个服务连接nacos,有没有用动态网关的功能?
你把所有服务都停掉,只这一个nacos看看还会有这个错不。如果不会报错了,就一个一个服务启动,看看什么时候执行什么操作会让nacos进入这个状态导致内存溢出。
相关可参考内容:https://github.com/alibaba/nacos/issues/4268
https://www.cnblogs.com/cherf/p/17169201.html
https://www.cnblogs.com/cherf/p/17169201.html 根据该文章的相关参考,我在项目的网关中找到了相关代码。麻烦问下,该代码会造成该问题吗?
就按照我上面说的来,先把相关服务关掉。或者把动态网关的逻辑先注释掉,再看看会不会继续有同样的错。
遇到这种问题,排除法是最好的办法。
关键是关不掉——这是生产环境,而测试环境一时模拟不了数据
生产环境不能关,本地环境无法操作,那就没法排查,你总得有一个满足,光看是看不出问题的。毕竟这是nacos服务报错,不是bladex的服务报错
扫一扫访问 Blade技术社区 移动端