Steps to reproduce:
Configure the Infinispan server:
$ ./infinispan-server-16.0.5/bin/cli.sh user create admin --password=pass.1234 --groups=admin
Start the Infinispan server:
$ ./infinispan-server-16.0.5/bin/server.sh --bind-address=127.0.0.1 --server-config=infinispan.xml --cluster-stack=tcp --port-offset=100 --cluster-name=cluster_127.0.0.1 --node-name=jdg1
2026-03-03 09:27:00,372 INFO [BOOT] JVM Java HotSpot(TM) 64-Bit Server VM Oracle Corporation 21.0.8+12-LTS-250
2026-03-03 09:27:00,376 INFO [BOOT] JVM arguments = [-server, --add-exports, java.naming/com.sun.jndi.ldap=ALL-UNNAMED, --add-opens, java.base/java.util=ALL-UNNAMED, --add-opens, java.base/java.util.concurrent=ALL-UNNAMED, -Xlog:gc*:file=/tmp/tests-clustering/jdg-1/server/log/gc.log:time,uptimemillis:filecount=5,filesize=3M, -Djava.awt.headless=true, -Djava.net.preferIPv4Stack=true, -XX:+ExitOnOutOfMemoryError, -XX:MetaspaceSize=64M, -Xms64m, -Xmx512m, -Dvisualvm.display.name=infinispan-server, -Djava.util.logging.manager=org.infinispan.server.loader.LogManager, -Dinfinispan.server.home.path=/tmp/tests-clustering/jdg-1, -jar, /tmp/tests-clustering/jdg-1/lib/infinispan-server-runtime-16.0.5.jar, --bind-address=127.0.0.1, --server-config=infinispan.xml, --cluster-stack=tcp, --port-offset=100, --cluster-name=cluster_127.0.0.1, --node-name=jdg1]
2026-03-03 09:27:00,377 INFO [BOOT] PID = 93693
2026-03-03 09:27:00,377 INFO [BOOT] Architecture = amd64
2026-03-03 09:27:00,378 INFO [BOOT] OS = Linux 6.18.13-200.fc43.x86_64
2026-03-03 09:27:00,379 INFO [BOOT] Processors = 16
2026-03-03 09:27:00,384 INFO [BOOT] Memory total = 66MiB
2026-03-03 09:27:00,384 INFO [BOOT] Memory max = 512MiB
2026-03-03 09:27:00,395 INFO [o.i.SERVER] ISPN080000: Infinispan Server 16.0.5 starting
2026-03-03 09:27:00,395 INFO [o.i.SERVER] ISPN080017: Server configuration: infinispan.xml
2026-03-03 09:27:00,395 INFO [o.i.SERVER] ISPN080032: Logging configuration: /tmp/tests-clustering/jdg-1/server/conf/log4j2.xml
2026-03-03 09:27:00,598 INFO [o.i.SERVER] ISPN080027: Loaded extension 'query-dsl-filter-converter-factory'
2026-03-03 09:27:00,598 INFO [o.i.SERVER] ISPN080027: Loaded extension 'continuous-query-filter-converter-factory'
2026-03-03 09:27:00,600 INFO [o.i.SERVER] ISPN080027: Loaded extension 'iteration-filter-converter-factory'
2026-03-03 09:27:00,605 INFO [o.i.SERVER] ISPN080027: Loaded extension 'party.iroiro.luajava.jsr223.LuaScriptEngineFactory'
2026-03-03 09:27:00,737 INFO [o.i.CONTAINER] ISPN000974: Virtual threads support: enabled
2026-03-03 09:27:00,831 INFO [o.i.CONTAINER] ISPN000556: Starting user marshaller 'org.infinispan.commons.marshall.ImmutableProtoStreamMarshaller'
2026-03-03 09:27:00,986 INFO [o.i.SERVER] ISPN080018: Connector RESP [address=internal, auth=RESP]
2026-03-03 09:27:00,989 INFO [o.i.SERVER] ISPN080018: Connector Memcached [address=internal, protocol=AUTO, auth=SCRAM-SHA-512,SCRAM-SHA-384,SCRAM-SHA-256,SCRAM-SHA-1,DIGEST-SHA-512,DIGEST-SHA-384,DIGEST-SHA-256,DIGEST-SHA,CRAM-MD5,DIGEST-MD5]
2026-03-03 09:27:01,029 INFO [o.i.SERVER] ISPN080018: Connector Hot Rod [name=hotrod-default, address=internal, protocol=HOTROD/4.1, auth=SCRAM-SHA-512,SCRAM-SHA-384,SCRAM-SHA-256,SCRAM-SHA-1,DIGEST-SHA-512,DIGEST-SHA-384,DIGEST-SHA-256,DIGEST-SHA,CRAM-MD5,DIGEST-MD5]
2026-03-03 09:27:01,091 INFO [o.i.SERVER] ISPN080018: Connector REST [name=rest-default, address=internal, auth=DIGEST]
2026-03-03 09:27:01,101 INFO [o.i.SERVER] ISPN005055: Using transport: Epoll
2026-03-03 09:27:01,143 INFO [o.i.SERVER] ISPN080018: Connector SinglePort [binding=default, address=http://127.0.0.1:11322, routes=REST/Memcached/HotRod/Resp]
2026-03-03 09:27:01,143 INFO [o.i.SERVER] ISPN080034: Server 'local' listening on http://127.0.0.1:11322
2026-03-03 09:27:01,416 INFO [o.i.CONTAINER] ISPN014068: Lucene version: 9.12.3
2026-03-03 09:27:01,636 INFO [o.i.CLUSTER] ISPN000078: Starting JGroups channel `cluster_127.0.0.1` with stack `tcp`
2026-03-03 09:27:01,638 INFO [o.j.JChannel] local_addr: jdg1(v=16.0.5), name: jdg1
2026-03-03 09:27:01,651 INFO [o.j.p.FD_SOCK2] server listening on *:58000
2026-03-03 09:27:03,654 INFO [o.j.p.p.GMS] jdg1(v=16.0.5): no members discovered after 2.00s: creating cluster as coordinator
2026-03-03 09:27:03,670 INFO [o.i.CLUSTER] ISPN000094: Received new cluster view for channel cluster_127.0.0.1: [jdg1(v=16.0.5)|0] (1) [jdg1(v=16.0.5)]
2026-03-03 09:27:03,704 INFO [o.i.CLUSTER] ISPN000079: Channel `cluster_127.0.0.1` local address is `jdg1`, physical addresses are `[192.168.1.22:7900]`
2026-03-03 09:27:03,707 INFO [o.i.CONTAINER] ISPN000389: Loaded global state, version=16.0.5 timestamp=2026-03-03T08:21:18.922240121Z
2026-03-03 09:27:04,076 INFO [o.i.SERVER] ISPN080001: Infinispan Server 16.0.5 started in 4072ms
Configure WildFly:
$ ./wildfly-40.0.0.Beta1/bin/jboss-cli.sh
embed-server --server-config=standalone-ha.xml
/subsystem=jgroups/channel=ee:write-attribute(name=stack,value=tcp)
/subsystem=transactions:write-attribute(name=node-identifier,value=wildfly1)
/socket-binding-group=standard-sockets/remote-destination-outbound-socket-binding=remote-jdg-server1:add(host=127.0.0.1, port=11322)
batch
/subsystem=infinispan/remote-cache-container=session_data_cc:add(default-remote-cluster=jdg-server-cluster, statistics-enabled=true, properties={infinispan.client.hotrod.auth_username=admin, infinispan.client.hotrod.auth_password=pass.1234})
/subsystem=infinispan/remote-cache-container=session_data_cc/remote-cluster=jdg-server-cluster:add(socket-bindings=[remote-jdg-server1])
run-batch
/subsystem=infinispan/remote-cache-container=session_data_cc:write-attribute(name=modules, value=[org.wildfly.clustering.web.hotrod])
/subsystem=distributable-web/hotrod-session-management=sm_offload:add(remote-cache-container=session_data_cc, granularity=SESSION)
/subsystem=distributable-web/hotrod-session-management=sm_offload/affinity=local:add()
/subsystem=distributable-web/hotrod-session-management=sm_offload_granular:add(remote-cache-container=session_data_cc, granularity=ATTRIBUTE)
/subsystem=distributable-web/hotrod-session-management=sm_offload_granular/affinity=local:add()
/subsystem=distributable-web:write-attribute(name=default-session-management, value=sm_offload)
/subsystem=infinispan/remote-cache-container=session_data_cc:write-attribute(name=marshaller,value=PROTOSTREAM)
Start WildFly:
$ ./wildfly-40.0.0.Beta1/bin/standalone.sh -b=127.0.0.1 -bmanagement=127.0.0.1 -bprivate=127.0.0.1 --server-config=standalone-ha.xml -Djboss.default.multicast.address=230.0.0.10 -Dprogram.name=wildfly1 -Djboss.node.name=wildfly1
Deploy the distributable application to WildFly:
$ cp cbnc.ear ./wildfly-40.0.0.Beta1/standalone/deployments/
Observe the "Connection refused: /127.0.0.1:11222" error.
This is unexpected, because Infinispan is listening on port 11322 and WildFly is configured (via the `remote-jdg-server1` outbound socket binding) to connect to port 11322 as well — yet the Hot Rod client attempts the default port 11222:
$ ./wildfly-40.0.0.Beta1/bin/standalone.sh -b=127.0.0.1 -bmanagement=127.0.0.1 -bprivate=127.0.0.1 --server-config=standalone-ha.xml -Djboss.default.multicast.address=230.0.0.10 -Dprogram.name=wildfly1 -Djboss.node.name=wildfly1
...
10:32:31,029 ERROR [org.infinispan.HOTROD] (HotRod-client-async-pool-13) ISPN004007: Exception encountered. Retry 11 out of 10: org.infinispan.client.hotrod.exceptions.TransportException:: io.netty.channel.AbstractChannel$AnnotatedConnectException: finishConnect(..) failed with error(-111): Connection refused: /127.0.0.1:11222
at org.infinispan.client.hotrod@16.0.5//org.infinispan.client.hotrod.impl.transport.netty.OperationChannel.handleError(OperationChannel.java:133)
at org.infinispan.client.hotrod@16.0.5//org.infinispan.client.hotrod.impl.transport.netty.OperationChannel.lambda$attemptConnect$0(OperationChannel.java:109)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:604)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:597)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:573)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:506)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:650)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:643)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:132)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe.fulfillConnectPromise(AbstractEpollChannel.java:679)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe.finishConnect(AbstractEpollChannel.java:698)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe.epollOutReady(AbstractEpollChannel.java:567)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:491)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:399)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:998)
at io.netty.netty-common@4.1.131.Final//io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
at org.wildfly.clustering.context@9.0.2.Final//org.wildfly.clustering.context.ContextualExecutor$1.execute(ContextualExecutor.java:96)
at org.wildfly.clustering.context@9.0.2.Final//org.wildfly.clustering.context.Contextualizer$2$1.run(Contextualizer.java:133)
at java.base/java.lang.Thread.run(Thread.java:1583)
Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: finishConnect(..) failed with error(-111): Connection refused: /127.0.0.1:11222
Caused by: java.net.ConnectException: finishConnect(..) failed with error(-111): Connection refused
at io.netty.netty-transport-native-unix-common@4.1.131.Final//io.netty.channel.unix.Errors.newConnectException0(Errors.java:166)
at io.netty.netty-transport-native-unix-common@4.1.131.Final//io.netty.channel.unix.Errors.handleConnectErrno(Errors.java:131)
at io.netty.netty-transport-native-unix-common@4.1.131.Final//io.netty.channel.unix.Socket.finishConnect(Socket.java:359)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe.doFinishConnect(AbstractEpollChannel.java:715)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe.finishConnect(AbstractEpollChannel.java:692)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe.epollOutReady(AbstractEpollChannel.java:567)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:491)
at io.netty.netty-transport-native-epoll@4.1.131.Final//io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:399)
at io.netty.netty-common@4.1.131.Final//io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:998)
at io.netty.netty-common@4.1.131.Final//io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
at org.wildfly.clustering.context@9.0.2.Final//org.wildfly.clustering.context.ContextualExecutor$1.execute(ContextualExecutor.java:96)
at org.wildfly.clustering.context@9.0.2.Final//org.wildfly.clustering.context.Contextualizer$2$1.run(Contextualizer.java:133)
at java.base/java.lang.Thread.run(Thread.java:1583)