diff --git a/README.md b/README.md
index 4482768..9568ede 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-信明橡塑能源&人员工资结算系统后台
+胶东机场能源&人员工资结算系统后台
分离版-SQLServer数据源
始于3.8.2分离版进行的SQLServer改造,测试无误用于生产环境,保持同步更新
1.(已完成)集成分布式锁、动态多数据源
diff --git a/os-ems/src/main/java/com/os/ems/record/mapper/RecordIotenvInstantMapper.java b/os-ems/src/main/java/com/os/ems/record/mapper/RecordIotenvInstantMapper.java
index 3076f1a..b2acb74 100644
--- a/os-ems/src/main/java/com/os/ems/record/mapper/RecordIotenvInstantMapper.java
+++ b/os-ems/src/main/java/com/os/ems/record/mapper/RecordIotenvInstantMapper.java
@@ -4,7 +4,7 @@ import java.util.List;
import java.util.Map;
import com.os.ems.record.domain.RecordIotenvInstant;
-import io.lettuce.core.dynamic.annotation.Param;
+import org.apache.ibatis.annotations.Param;
/**
* 物联网数据Mapper接口
diff --git a/os-ems/src/main/resources/mapper/ems/record/RecordIotenvInstantMapper.xml b/os-ems/src/main/resources/mapper/ems/record/RecordIotenvInstantMapper.xml
index d504e92..f158e13 100644
--- a/os-ems/src/main/resources/mapper/ems/record/RecordIotenvInstantMapper.xml
+++ b/os-ems/src/main/resources/mapper/ems/record/RecordIotenvInstantMapper.xml
@@ -21,7 +21,20 @@ PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
- select objid, monitorId, temperature, humidity, illuminance, noise, concentration, vibration_speed, vibration_displacement, vibration_acceleration, vibration_temp, collectTime, recodeTime from record_iotenv_instant
+ select objid,
+ monitorId,
+ temperature,
+ humidity,
+ illuminance,
+ noise,
+ concentration,
+ vibration_speed,
+ vibration_displacement,
+ vibration_acceleration,
+ vibration_temp,
+ collectTime,
+ recodeTime
+ from record_iotenv_instant
-
+
-
+
-
-
-
+
-
+
-
+
@@ -192,12 +191,12 @@
#{icon},
#{remark},
#{createBy},
- now()
+ sysdate()
)
-
+
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index c130423..233b042 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,7 +10,7 @@
XMXS-OS
http://www.ruoyi.vip
- 信明橡塑运维系统
+ 胶东机场运维系统
3.8.7
diff --git a/麒麟系统环境部署命令/1.txt b/麒麟系统环境部署命令/1.txt
new file mode 100644
index 0000000..d31ef76
--- /dev/null
+++ b/麒麟系统环境部署命令/1.txt
@@ -0,0 +1,432 @@
+Last login: Tue Apr 22 21:18:53 2025 from 192.168.10.11
+Last login: Tue Apr 22 17:16:45 2025 from 192.168.10.11
+[root@localhost ~]# ls
+anaconda-ks.cfg original-ks.cfg
+[root@localhost ~]# cd /
+[root@localhost /]# ls
+backup bin boot dev etc home lib lib64 media mnt opt proc root run sbin srv sys tmp usr var
+[root@localhost /]# cat /etc/os-release
+NAME="Kylin Linux Advanced Server"
+VERSION="V10 (Lance)"
+ID="kylin"
+VERSION_ID="V10"
+PRETTY_NAME="Kylin Linux Advanced Server V10 (Lance)"
+ANSI_COLOR="0;31"
+
+[root@localhost /]# uname -a
+Linux localhost.localdomain 4.19.90-52.22.v2207.ky10.aarch64 #1 SMP Tue Mar 14 11:52:45 CST 2023 aarch64 aarch64 aarch64 GNU/Linux
+[root@localhost /]# free -h
+ total used free shared buff/cache available
+Mem: 29Gi 2.6Gi 25Gi 42Mi 1.0Gi 23Gi
+Swap: 15Gi 0B 15Gi
+[root@localhost /]# df -h
+文件系统 容量 已用 可用 已用% 挂载点
+devtmpfs 15G 0 15G 0% /dev
+tmpfs 15G 64K 15G 1% /dev/shm
+tmpfs 15G 31M 15G 1% /run
+tmpfs 15G 0 15G 0% /sys/fs/cgroup
+/dev/mapper/klas-root 380G 12G 368G 4% /
+tmpfs 15G 64K 15G 1% /tmp
+/dev/sda2 1014M 165M 850M 17% /boot
+/dev/sda1 599M 6.5M 593M 2% /boot/efi
+/dev/sdb1 7.3T 52G 7.3T 1% /media/raid1
+tmpfs 3.0G 768K 3.0G 1% /run/user/1000
+tmpfs 3.0G 0 3.0G 0% /run/user/0
+[root@localhost /]# lscpu
+架构: aarch64
+CPU 运行模式: 64-bit
+字节序: Little Endian
+CPU: 64
+在线 CPU 列表: 0-63
+每个核的线程数: 1
+每个座的核数: 64
+座: 1
+NUMA 节点: 8
+厂商 ID: Phytium
+型号: 2
+型号名称: FT-2000+/64
+步进: 0x1
+BogoMIPS: 100.00
+L1d 缓存: 2 MiB
+L1i 缓存: 2 MiB
+L2 缓存: 32 MiB
+NUMA 节点0 CPU: 0-7
+NUMA 节点1 CPU: 8-15
+NUMA 节点2 CPU: 16-23
+NUMA 节点3 CPU: 24-31
+NUMA 节点4 CPU: 32-39
+NUMA 节点5 CPU: 40-47
+NUMA 节点6 CPU: 48-55
+NUMA 节点7 CPU: 56-63
+Vulnerability Itlb multihit: Not affected
+Vulnerability L1tf: Not affected
+Vulnerability Mds: Not affected
+Vulnerability Meltdown: Not affected
+Vulnerability Mmio stale data: Not affected
+Vulnerability Spec store bypass: Not affected
+Vulnerability Spectre v1: Mitigation; __user pointer sanitization
+Vulnerability Spectre v2: Not affected
+Vulnerability Srbds: Not affected
+Vulnerability Tsx async abort: Not affected
+标记: fp asimd evtstrm crc32 cpuid
+[root@localhost /]# ^C
+
+[root@localhost ~]# mkdir -p ~/tidb-deploy
+[root@localhost ~]# cd ~/tidb-deploy
+[root@localhost tidb-deploy]# tar -zxvf /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64.tar.gz.tabby-upload
+tidb-community-server-v8.5.1-linux-arm64/
+tidb-community-server-v8.5.1-linux-arm64/diag-v1.6.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/7.influxdb.json
+tidb-community-server-v8.5.1-linux-arm64/387.tidb-dashboard.json
+tidb-community-server-v8.5.1-linux-arm64/925.playground.json
+tidb-community-server-v8.5.1-linux-arm64/blackbox_exporter-v0.23.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/6474.pd.json
+tidb-community-server-v8.5.1-linux-arm64/8760.ctl.json
+tidb-community-server-v8.5.1-linux-arm64/timestamp.json
+tidb-community-server-v8.5.1-linux-arm64/influxdb-v2.5.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tiup-v1.16.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tiflash-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/alertmanager-v0.26.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tiup-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tidb-v8.5.1-linux-arm64.tar.gz
+
+gzip: stdin: unexpected end of file
+tar: 归档文件中异常的 EOF
+tar: 归档文件中异常的 EOF
+tar: Error is not recoverable: exiting now
+[root@localhost tidb-deploy]# ^C
+[root@localhost tidb-deploy]# md5sum /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64.tar.gz.tabby-upload
+md5sum: /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64.tar.gz.tabby-upload: 没有那个文件或目录
+[root@localhost tidb-deploy]# md5sum /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64.tar.gz
+f74a3df26fede5680d776d784798ee9a /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64.tar.gz
+[root@localhost tidb-deploy]# cd /root/tidb-deploy
+[root@localhost tidb-deploy]# tar xzvf tidb-community-server-${version}-linux-amd64.tar.gz && \
+> sh tidb-community-server-${version}-linux-amd64/local_install.sh && \
+>
+>
+>
+> ^C
+[root@localhost tidb-deploy]# [root@localhost tidb-deploy]# tar xzvf tidb-community-server-${version}-linux-amd64.tar.gz && \
+> > sh tidb-community-server-${version}-linux-amd64/local_install.sh && \
+>
+>
+> ^C
+[root@localhost tidb-deploy]# tar xzvf tidb-community-server-v8.5.1-linux-amd64.tar.gz
+tar (child): tidb-community-server-v8.5.1-linux-amd64.tar.gz:无法 open: 没有那个文件或目录
+tar (child): Error is not recoverable: exiting now
+tar: Child returned status 2
+tar: Error is not recoverable: exiting now
+[root@localhost tidb-deploy]# tar xzvf tidb-community-server-v8.5.1-linux-amd64.tar.gz && \
+> > sh tidb-community-server-v8.5.1-linux-amd64/local_install.sh && \
+> > source /home/tidb/.bash_profile
+tar (child): tidb-community-server-v8.5.1-linux-amd64.tar.gz:无法 open: 没有那个文件或目录
+tar (child): Error is not recoverable: exiting now
+tar: Child returned status 2
+tar: Error is not recoverable: exiting now
+[root@localhost tidb-deploy]# tar xf tidb-community-toolkit-v8.5.1-linux-amd64.tar.gz
+tar: tidb-community-toolkit-v8.5.1-linux-amd64.tar.gz:无法 open: 没有那个文件或目录
+tar: Error is not recoverable: exiting now
+[root@localhost tidb-deploy]# tar xzvf /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/
+tidb-community-server-v8.5.1-linux-arm64/diag-v1.6.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/7.influxdb.json
+tidb-community-server-v8.5.1-linux-arm64/387.tidb-dashboard.json
+tidb-community-server-v8.5.1-linux-arm64/925.playground.json
+tidb-community-server-v8.5.1-linux-arm64/blackbox_exporter-v0.23.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/6474.pd.json
+tidb-community-server-v8.5.1-linux-arm64/8760.ctl.json
+tidb-community-server-v8.5.1-linux-arm64/timestamp.json
+tidb-community-server-v8.5.1-linux-arm64/influxdb-v2.5.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tiup-v1.16.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tiflash-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/alertmanager-v0.26.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tiup-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tidb-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/cluster-v1.16.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/2236.diag.json
+tidb-community-server-v8.5.1-linux-arm64/insight-v0.4.2-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/playground-v1.16.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/7.alertmanager.json
+tidb-community-server-v8.5.1-linux-arm64/node_exporter-v1.5.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/1003.tiup.json
+tidb-community-server-v8.5.1-linux-arm64/1003.cluster.json
+tidb-community-server-v8.5.1-linux-arm64/grafana-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/tikv-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/5595.prometheus.json
+tidb-community-server-v8.5.1-linux-arm64/9.node_exporter.json
+tidb-community-server-v8.5.1-linux-arm64/5542.grafana.json
+tidb-community-server-v8.5.1-linux-arm64/snapshot.json
+tidb-community-server-v8.5.1-linux-arm64/root.json
+tidb-community-server-v8.5.1-linux-arm64/keys/
+tidb-community-server-v8.5.1-linux-arm64/keys/dcc60371ff9e7d84-index.json
+tidb-community-server-v8.5.1-linux-arm64/keys/c15eaea99981b351-root.json
+tidb-community-server-v8.5.1-linux-arm64/keys/0fd8aafae75f0a2b-snapshot.json
+tidb-community-server-v8.5.1-linux-arm64/keys/879d996bd9f27ec6-pingcap.json
+tidb-community-server-v8.5.1-linux-arm64/keys/44c82672eb98a1d9-root.json
+tidb-community-server-v8.5.1-linux-arm64/keys/14e2a603e0a16fc6-timestamp.json
+tidb-community-server-v8.5.1-linux-arm64/keys/c67d7c794870f14a-root.json
+tidb-community-server-v8.5.1-linux-arm64/pd-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/9.blackbox_exporter.json
+tidb-community-server-v8.5.1-linux-arm64/tiproxy-v1.3.0-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/18.insight.json
+tidb-community-server-v8.5.1-linux-arm64/ctl-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/565.tiproxy.json
+tidb-community-server-v8.5.1-linux-arm64/local_install.sh
+tidb-community-server-v8.5.1-linux-arm64/6195.tikv.json
+tidb-community-server-v8.5.1-linux-arm64/5991.tiflash.json
+tidb-community-server-v8.5.1-linux-arm64/tidb-dashboard-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/1.root.json
+tidb-community-server-v8.5.1-linux-arm64/10035.tidb.json
+tidb-community-server-v8.5.1-linux-arm64/prometheus-v8.5.1-linux-arm64.tar.gz
+tidb-community-server-v8.5.1-linux-arm64/1.index.json
+[root@localhost tidb-deploy]# sh /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64/local_install.sh
+Disable telemetry success
+Successfully set mirror to /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64
+Detected shell: bash
+Shell profile: /root/.bash_profile
+/root/.bash_profile has been modified to to add tiup to PATH
+open a new terminal or source /root/.bash_profile to use it
+Installed path: /root/.tiup/bin/tiup
+===============================================
+1. source /root/.bash_profile
+2. Have a try: tiup playground
+===============================================
+[root@localhost tidb-deploy]# source /home/tidb/.bash_profile
+-bash: /home/tidb/.bash_profile: 没有那个文件或目录
+[root@localhost tidb-deploy]# cd
+[root@localhost ~]# cd
+[root@localhost ~]# cd
+[root@localhost ~]# source /home/tidb/.bash_profile
+-bash: /home/tidb/.bash_profile: 没有那个文件或目录
+[root@localhost ~]# sh /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64/local_install.sh
+Disable telemetry success
+Successfully set mirror to /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64
+Detected shell: bash
+Shell profile: /root/.bash_profile
+/root/.bash_profile has been modified to to add tiup to PATH
+open a new terminal or source /root/.bash_profile to use it
+Installed path: /root/.tiup/bin/tiup
+===============================================
+1. source /root/.bash_profile
+2. Have a try: tiup playground
+===============================================
+[root@localhost ~]# sh /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64/local_install.sh
+Disable telemetry success
+Successfully set mirror to /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64
+Detected shell: bash
+Shell profile: /root/.bash_profile
+/root/.bash_profile has been modified to to add tiup to PATH
+open a new terminal or source /root/.bash_profile to use it
+Installed path: /root/.tiup/bin/tiup
+===============================================
+1. source /root/.bash_profile
+2. Have a try: tiup playground
+===============================================
+[root@localhost ~]#
+
+[root@localhost ~]# source /root/.bash_profile
+[root@localhost ~]# tar xf /tidb-community-toolkit-v8.5.1-linux-amd64.tar.gz
+[root@localhost ~]# ls -ld /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64 /root/tidb-community-toolkit-v8.5.1-linux-amd64
+drwxr-xr-x 3 root root 4096 1月 17 16:02 /root/tidb-community-toolkit-v8.5.1-linux-amd64
+drwxr-xr-x 3 root root 4096 1月 17 16:00 /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64
+[root@localhost ~]# cd /root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# cp -rp keys ~/.tiup/
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# tiup mirror merge ../tidb-community-toolkit-${version}-linux-amd64
+Error: stat ../tidb-community-toolkit--linux-amd64: no such file or directory
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]#
+
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# tiup mirror merge /root/tidb-community-toolkit-v8.5.1-linux-amd64
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# tiup cluster template > topology.yaml
+
+A new version of cluster is available: -> v1.16.1
+
+ To update this component: tiup update cluster
+ To update all components: tiup update --all
+
+The component `cluster` version is not installed; downloading from repository.
+
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# tiup cluster template > topology.yaml
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# tiup cluster list
+Name User Version Path PrivateKey
+---- ---- ------- ---- ----------
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]# tiup cluster display tidb-test
+
+Error: Cluster tidb-test not found
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-22-22-29-17.log.
+[root@localhost tidb-community-server-v8.5.1-linux-arm64]#
+
+[root@localhost ~]# ssh-keygen -t rsa
+Generating public/private rsa key pair.
+Enter file in which to save the key (/root/.ssh/id_rsa): zang010303
+Enter passphrase (empty for no passphrase):
+Enter same passphrase again:
+Your identification has been saved in zang010303
+Your public key has been saved in zang010303.pub
+The key fingerprint is:
+SHA256:niYxVopzm9Mt9SWaqoZnipyo3xHhjkaPDo+6CQq2Esw root@localhost.localdomain
+The key's randomart image is:
++---[RSA 3072]----+
+| |
+| |
+| . . |
+| . o o |
+|o . = * S . . . |
+|.E = = B + + o |
+|+o+ +.= * + . |
+|*X.+..++ o |
+|%+B o=... |
++----[SHA256]-----+
+[root@localhost ~]# ssh-copy-id root@localhost
+/usr/bin/ssh-copy-id: ERROR: No identities found
+[root@localhost ~]#
+[root@localhost ~]# rm zang010303 zang010303.pub
+rm:是否删除普通文件 'zang010303'?y
+rm:是否删除普通文件 'zang010303.pub'?y
+[root@localhost ~]#
+[root@localhost ~]# ssh-keygen -t rsa
+Generating public/private rsa key pair.
+Enter file in which to save the key (/root/.ssh/id_rsa):
+Created directory '/root/.ssh'.
+Enter passphrase (empty for no passphrase):
+Enter same passphrase again:
+Your identification has been saved in /root/.ssh/id_rsa
+Your public key has been saved in /root/.ssh/id_rsa.pub
+The key fingerprint is:
+SHA256:yk3sqXsspUNG9xcTX91JRdIy22h3ipk92ykp4F52F98 root@localhost.localdomain
+The key's randomart image is:
++---[RSA 3072]----+
+| o+B|
+| . oo=|
+| o B |
+| ... o = +|
+| . .S. O +.|
+| .o=.o. = + +|
+| oo+= .+ ..=E|
+| +.o.o..oo..|
+| o=.. . . |
++----[SHA256]-----+
+[root@localhost ~]# ssh-copy-id root@localhost
+/usr/bin/ssh-copy-id: INFO: Source of key(s) to be installed: "/root/.ssh/id_rsa.pub"
+The authenticity of host 'localhost (::1)' can't be established.
+ECDSA key fingerprint is SHA256:kWUu1O+Rl9nPUDs594h2gNsfUBLfb0WusbxxZZtTNnU.
+Are you sure you want to continue connecting (yes/no/[fingerprint])?
+[root@localhost ~]# ssh-copy-id root@localhost
+\/usr/bin/ssh-copy-id: INFO: Source of key(s) to be installed: "/root/.ssh/id_rsa.pub"
+/usr/bin/ssh-copy-id: INFO: attempting to log in with the new key(s), to filter out any that are already installed
+
+/usr/bin/ssh-copy-id: INFO: 1 key(s) remain to be installed -- if you are prompted now it is to install the new keys
+
+Authorized users only. All activities may be monitored and reported.
+root@localhost's password:
+Permission denied, please try again.
+root@localhost's password:
+Permission denied, please try again.
+root@localhost's password:
+root@localhost: Permission denied (publickey,gssapi-keyex,gssapi-with-mic,password).
+[root@localhost ~]# ssh-copy-id root@localhost
+/usr/bin/ssh-copy-id: INFO: Source of key(s) to be installed: "/root/.ssh/id_rsa.pub"
+/usr/bin/ssh-copy-id: INFO: attempting to log in with the new key(s), to filter out any that are already installed
+/usr/bin/ssh-copy-id: INFO: 1 key(s) remain to be installed -- if you are prompted now it is to install the new keys
+
+Authorized users only. All activities may be monitored and reported.
+root@localhost's password:
+Permission denied, please try again.
+root@localhost's password:
+Permission denied, please try again.
+root@localhost's password:
+/usr/bin/ssh-copy-id: INFO: 1 key(s) remain to be installed -- if you are prompted now it is to install the new keys
+
+Authorized users only. All activities may be monitored and reported.
+root@localhost's password:
+
+Number of key(s) added: 1
+
+Now try logging into the machine, with: "ssh 'root@localhost'"
+and check to make sure that only the key(s) you wanted were added.
+
+[root@localhost ~]# source /root/.bash_profile
+[root@localhost ~]# tiup playground
+
+A new version of playground is available: -> v1.16.1
+
+ To update this component: tiup update playground
+ To update all components: tiup update --all
+
+The component `playground` version is not installed; downloading from repository.
+
+Note: Version constraint is resolved to v8.5.1. If you'd like to use other versions:
+
+ Use exact version: tiup playground v7.1.0
+ Use version range: tiup playground ^5
+ Use nightly: tiup playground nightly
+
+The component `pd` version v8.5.1 is not installed; downloading from repository.
+Start pd instance: v8.5.1
+The component `tikv` version v8.5.1 is not installed; downloading from repository.
+Start tikv instance: v8.5.1
+The component `tidb` version v8.5.1 is not installed; downloading from repository.
+Start tidb instance: v8.5.1
+Waiting for tidb instances ready
+127.0.0.1:4000 ... Done
+The component `prometheus` version v8.5.1 is not installed; downloading from repository.
+The component `tiflash` version v8.5.1 is not installed; downloading from repository.
+Start tiflash instance: v8.5.1
+Waiting for tiflash instances ready
+127.0.0.1:3930 ... Done
+
+🎉 TiDB Playground Cluster is started, enjoy!
+
+Connect TiDB: mysql --comments --host 127.0.0.1 --port 4000 -u root
+TiDB Dashboard: http://127.0.0.1:2379/dashboard
+Grafana: http://127.0.0.1:3000
+
+
+
+[root@localhost ~]# mysql --host 127.0.0.1 --port 4000 -u rootmysql --host 127.0.0.1 --port 4000 -u root
+ERROR 2002 (HY000): Can't connect to MySQL server on '127.0.0.1' (115)
+[root@localhost ~]# ^C
+[root@localhost ~]# ps -ef | grep tidb
+root 4012 3574 0 15:18 pts/0 00:00:00 grep tidb
+[root@localhost ~]# ^C
+[root@localhost ~]# tiup playground
+
+Note: Version constraint is resolved to v8.5.1. If you'd like to use other versions:
+
+ Use exact version: tiup playground v7.1.0
+ Use version range: tiup playground ^5
+ Use nightly: tiup playground nightly
+
+Start pd instance: v8.5.1
+Start tikv instance: v8.5.1
+Start tidb instance: v8.5.1
+Waiting for tidb instances ready
+127.0.0.1:4000 ... Done
+Start tiflash instance: v8.5.1
+Waiting for tiflash instances ready
+127.0.0.1:3930 ... Done
+
+🎉 TiDB Playground Cluster is started, enjoy!
+
+Connect TiDB: mysql --comments --host 127.0.0.1 --port 4000 -u root
+TiDB Dashboard: http://127.0.0.1:2379/dashboard
+Grafana: http://127.0.0.1:3000
+^CGot signal interrupt (Component: playground ; PID: 4049)
+
+Playground receive signal: interrupt
+Wait tiflash(4609) to quit...
+Wait grafana(4603) to quit...
+Wait prometheus(4522) to quit...
+Wait ng-monitoring(4523) to quit...
+Grafana quit
+prometheus quit
+ng-monitoring quit
+tiflash quit
+Wait tidb(4063) to quit...
+tidb quit
+Wait tikv(4062) to quit...
+tikv quit
+Wait pd(4061) to quit...
+pd quit
+
+
diff --git a/麒麟系统环境部署命令/2.txt b/麒麟系统环境部署命令/2.txt
new file mode 100644
index 0000000..9a873fc
--- /dev/null
+++ b/麒麟系统环境部署命令/2.txt
@@ -0,0 +1,3432 @@
+]# vi ssh_config
+[root@localhost ssh]# sudo find / -name topology.yaml 2>/dev/null
+/root/tidb-deploy/tidb-community-server-v8.5.1-linux-arm64/topology.yaml
+/root/topology.yaml
+[root@localhost ssh]# cd /
+[root@localhost /]# cd /root
+[root@localhost ~]# ls
+anaconda-ks.cfg original-ks.cfg tidb-community-toolkit-v8.5.1-linux-amd64 tidb-deploy topology.yaml
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster topology.yaml --user root -p
+
+Error: Expect at least 3 arguments, but received 2 arguments (tui.mismatch_args)
+
+Usage:
+ tiup cluster deploy [flags]
+
+Flags:
+ -h, --help help for deploy
+ -i, --identity_file string The path of the SSH identity file. If specified, public key authentication will be used. (default "/root/.ssh/id_rsa")
+ --ignore-config-check Ignore the config check result of components
+ --no-labels Don't check TiKV labels
+ -p, --password Use password of target hosts. If specified, password authentication will be used.
+ --skip-create-user (EXPERIMENTAL) Skip creating the user specified in topology.
+ -u, --user string The user name to login via SSH. The user must has root (or sudo) privilege. (default "root")
+
+Global Flags:
+ -c, --concurrency int max number of parallel tasks allowed (default 5)
+ --format string (EXPERIMENTAL) The format of output, available values are [default, json] (default "default")
+ --ssh string (EXPERIMENTAL) The executor type: 'builtin', 'system', 'none'.
+ --ssh-timeout uint Timeout in seconds to connect host via SSH, ignored for operations that don't need an SSH connection. (default 5)
+ --wait-timeout uint Timeout in seconds to wait for an operation to complete, ignored for operations that don't fit. (default 120)
+ -y, --yes Skip all confirmations and assumes 'yes'
+
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
+
+[root@localhost ~]# df -h /tidb-data
+文件系统 容量 已用 可用 已用% 挂载点
+/dev/mapper/klas-root 380G 36G 345G 10% /
+[root@localhost ~]# fdisk -l
+Disk /dev/sda:446.63 GiB,479559942144 字节,936640512 个扇区
+磁盘型号:MR9361-8i
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+磁盘标签类型:gpt
+磁盘标识符:622B49B2-FEC8-4D7B-B288-4F361748252F
+
+设备 起点 末尾 扇区 大小 类型
+/dev/sda1 2048 1230847 1228800 600M EFI 系统
+/dev/sda2 1230848 3327999 2097152 1G Linux 文件系统
+/dev/sda3 3328000 936638463 933310464 445G Linux LVM
+
+
+Disk /dev/sdb:7.28 TiB,8000450330624 字节,15625879552 个扇区
+磁盘型号:MR9361-8i
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 512 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+磁盘标签类型:gpt
+磁盘标识符:6A2BC416-B62B-4E3D-9DB9-31169B164FC0
+
+设备 起点 末尾 扇区 大小 类型
+/dev/sdb1 2048 15625877503 15625875456 7.3T Linux 文件系统
+
+
+Disk /dev/mapper/klas-root:379.84 GiB,407849926656 字节,796581888 个扇区
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+
+
+Disk /dev/mapper/klas-swap:15.2 GiB,16315842560 字节,31866880 个扇区
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+
+
+Disk /dev/mapper/klas-backup:50 GiB,53687091200 字节,104857600 个扇区
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+[root@localhost ~]# lsblk
+NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
+sda 8:0 0 446.6G 0 disk
+├─sda1 8:1 0 600M 0 part /boot/efi
+├─sda2 8:2 0 1G 0 part /boot
+└─sda3 8:3 0 445G 0 part
+ ├─klas-root 252:0 0 379.8G 0 lvm /
+ ├─klas-swap 252:1 0 15.2G 0 lvm [SWAP]
+ └─klas-backup 252:2 0 50G 0 lvm
+sdb 8:16 0 7.3T 0 disk
+└─sdb1 8:17 0 7.3T 0 part /media/raid1
+[root@localhost ~]#
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+ Start instance 10.42.0.1:9093 success
+Starting component node_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
+Starting component blackbox_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
++ [ Serial ] - UpdateTopology: cluster=tidb-cluster
+Started cluster `tidb-cluster` successfully
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+Welcome to the MariaDB monitor. Commands end with ; or \g.
+Your MySQL connection id is 648019974
+Server version: 8.0.11-TiDB-v8.5.1 TiDB Server (Apache License 2.0) Community Edition, MySQL 8.0 compatible
+
+Copyright (c) 2000, 2018, Oracle, MariaDB Corporation Ab and others.
+
+Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
+
+MySQL [(none)]> show databses;
+ERROR 1064 (42000): You have an error in your SQL syntax; check the manual that corresponds to your TiDB version for the right syntax to use line 1 column 13 near "databses"
+MySQL [(none)]> show databases;
++--------------------+
+| Database |
++--------------------+
+| INFORMATION_SCHEMA |
+| METRICS_SCHEMA |
+| PERFORMANCE_SCHEMA |
+| mysql |
+| sys |
+| test |
++--------------------+
+6 rows in set (0.000 sec)
+
+MySQL [(none)]> exit;
+Bye
+[root@localhost ~]# tuip cluster display tidb-cluster
+-bash: tuip:未找到命令
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Up /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# tiup cluster list
+Name User Version Path PrivateKey
+---- ---- ------- ---- ----------
+tidb-cluster tidb v8.5.1 /root/.tiup/storage/cluster/clusters/tidb-cluster /root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa
+[root@localhost ~]# df -h /tidb-data
+文件系统 容量 已用 可用 已用% 挂载点
+/dev/mapper/klas-root 380G 36G 345G 10% /
+[root@localhost ~]# fdisk -l
+Disk /dev/sda:446.63 GiB,479559942144 字节,936640512 个扇区
+磁盘型号:MR9361-8i
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+磁盘标签类型:gpt
+磁盘标识符:622B49B2-FEC8-4D7B-B288-4F361748252F
+
+设备 起点 末尾 扇区 大小 类型
+/dev/sda1 2048 1230847 1228800 600M EFI 系统
+/dev/sda2 1230848 3327999 2097152 1G Linux 文件系统
+/dev/sda3 3328000 936638463 933310464 445G Linux LVM
+
+
+Disk /dev/sdb:7.28 TiB,8000450330624 字节,15625879552 个扇区
+磁盘型号:MR9361-8i
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 512 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+磁盘标签类型:gpt
+磁盘标识符:6A2BC416-B62B-4E3D-9DB9-31169B164FC0
+
+设备 起点 末尾 扇区 大小 类型
+/dev/sdb1 2048 15625877503 15625875456 7.3T Linux 文件系统
+
+
+Disk /dev/mapper/klas-root:379.84 GiB,407849926656 字节,796581888 个扇区
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+
+
+Disk /dev/mapper/klas-swap:15.2 GiB,16315842560 字节,31866880 个扇区
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+
+
+Disk /dev/mapper/klas-backup:50 GiB,53687091200 字节,104857600 个扇区
+单元:扇区 / 1 * 512 = 512 字节
+扇区大小(逻辑/物理):512 字节 / 4096 字节
+I/O 大小(最小/最佳):262144 字节 / 262144 字节
+[root@localhost ~]# lsblk
+NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
+sda 8:0 0 446.6G 0 disk
+├─sda1 8:1 0 600M 0 part /boot/efi
+├─sda2 8:2 0 1G 0 part /boot
+└─sda3 8:3 0 445G 0 part
+ ├─klas-root 252:0 0 379.8G 0 lvm /
+ ├─klas-swap 252:1 0 15.2G 0 lvm [SWAP]
+ └─klas-backup 252:2 0 50G 0 lvm
+sdb 8:16 0 7.3T 0 disk
+└─sdb1 8:17 0 7.3T 0 part /media/raid1
+[root@localhost ~]# ^C
+[root@localhost ~]# ^C
+[root@localhost ~]# ^C
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# mkdir -p /media/raid1/tidb-data
+[root@localhost ~]# cp -a /tidb-data/* /media/raid1/tidb-data/
+[root@localhost ~]# mv /tidb-data /tidb-data.bak
+[root@localhost ~]# chown -R tidb:tidb /media/raid1/tidb-data
+[root@localhost ~]# chmod -R 750 /media/raid1/tidb-data
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+
+Error: failed to start alertmanager: failed to start: 10.42.0.1 alertmanager-9093.service, please check the instance's log(/tidb-deploy/alertmanager-9093/log) for more detail.: timed out waiting for port 9093 to be started after 2m0s
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-24-15-40-27.log.
+[root@localhost ~]#
+
+
+
+
+
+
+
+
+
+
+
++ Detect CPU Arch Name
+ - Detecting node 10.0.1.11 Arch info ... Error
+ - Detecting node 10.0.1.12 Arch info ... Error
+ - Detecting node 10.0.1.13 Arch info ... Error
+ - Detecting node 10.0.1.17 Arch info ... Error
+ - Detecting node 10.0.1.18 Arch info ... Error
+ - Detecting node 10.0.1.19 Arch info ... Error
+ - Detecting node 10.0.1.14 Arch info ... Error
+ - Detecting node 10.0.1.15 Arch info ... Error
+ - Detecting node 10.0.1.16 Arch info ... Error
+ - Detecting node 10.0.1.20 Arch info ... Error
+ - Detecting node 10.0.1.21 Arch info ... Error
+ - Detecting node 10.0.1.22 Arch info ... Error
+
+Error: failed to fetch cpu-arch or kernel-name: executor.ssh.execute_failed: Failed to execute command over SSH for 'root@10.0.1.11:22' {ssh_stderr: , ssh_stdout: , ssh_command: export LANG=C; PATH=$PATH:/bin:/sbin:/usr/bin:/usr/sbin; uname -m}, cause: dial tcp 10.0.1.11:22: connect: network is unreachable
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-24-11-08-27.log.
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# source /root/.bash_profile
+[root@localhost ~]# tiup playground v8.5.1 --host 10.42.0.1
+Start pd instance: v8.5.1
+Start tikv instance: v8.5.1
+Start tidb instance: v8.5.1
+Waiting for tidb instances ready
+10.42.0.1:4000 ... ⠋
+^CGot signal interrupt (Component: playground ; PID: 32567)
+
+Playground receive signal: interrupt
+Waiting for tidb instances ready
+Waiting for tidb instances ready
+10.42.0.1:4000 ... ⠋
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
++ Detect CPU Arch Name
+ - Detecting node 10.42.0.1 Arch info ... Error
+
+Error: failed to fetch cpu-arch or kernel-name: executor.ssh.execute_failed: Failed to execute command over SSH for 'root@10.42.0.1:22' {ssh_stderr: , ssh_stdout: , ssh_command: export LANG=C; PATH=$PATH:/bin:/sbin:/usr/bin:/usr/sbin; uname -m}, cause: ssh: handshake failed: ssh: unable to authenticate, attempted methods [none], no supported methods remain
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-24-11-16-16.log.
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
++ Detect CPU Arch Name
+ - Detecting node 10.42.0.1 Arch info ... Done
+
++ Detect CPU OS Name
+ - Detecting node 10.42.0.1 OS info ... Done
+Please confirm your topology:
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Role Host Ports OS/Arch Directories
+---- ---- ----- ------- -----------
+pd 10.42.0.1 2379/2380 linux/aarch64 /tidb-deploy/pd-2379,/tidb-data/pd-2379
+tikv 10.42.0.1 20160/20180 linux/aarch64 /tidb-deploy/tikv-20160,/tidb-data/tikv-20160
+tidb 10.42.0.1 4000/10080 linux/aarch64 /tidb-deploy/tidb-4000
+tiflash 10.42.0.1 9000/3930/20170/20292/8234/8123 linux/aarch64 /tidb-deploy/tiflash-9000,/tidb-data/tiflash-9000
+prometheus 10.42.0.1 9090/12020 linux/aarch64 /tidb-deploy/prometheus-9090,/tidb-data/prometheus-9090
+grafana 10.42.0.1 3000 linux/aarch64 /tidb-deploy/grafana-3000
+alertmanager 10.42.0.1 9093/9094 linux/aarch64 /tidb-deploy/alertmanager-9093,/tidb-data/alertmanager-9093
+Attention:
+ 1. If the topology is not what you expected, check your yaml file.
+ 2. Please confirm there is no port/directory conflicts in same host.
+Do you want to continue? [y/N]: (default=N) y
++ Generate SSH keys ... Done
++ Download TiDB components
+ - Download pd:v8.5.1 (linux/arm64) ... Done
+ - Download tikv:v8.5.1 (linux/arm64) ... Done
+ - Download tidb:v8.5.1 (linux/arm64) ... Done
+ - Download tiflash:v8.5.1 (linux/arm64) ... Done
+ - Download prometheus:v8.5.1 (linux/arm64) ... Done
+ - Download grafana:v8.5.1 (linux/arm64) ... Done
+ - Download alertmanager: (linux/arm64) ... Done
+ - Download node_exporter: (linux/arm64) ... Done
+ - Download blackbox_exporter: (linux/arm64) ... Done
++ Initialize target host environments
+ - Prepare 10.42.0.1:22 ... Done
++ Deploy TiDB instance
+ - Copy pd -> 10.42.0.1 ... Done
+ - Copy tikv -> 10.42.0.1 ... Done
+ - Copy tidb -> 10.42.0.1 ... Done
+ - Copy tiflash -> 10.42.0.1 ... Done
+ - Copy prometheus -> 10.42.0.1 ... Done
+ - Copy grafana -> 10.42.0.1 ... Done
+ - Copy alertmanager -> 10.42.0.1 ... Done
+ - Deploy node_exporter -> 10.42.0.1 ... Done
+ - Deploy blackbox_exporter -> 10.42.0.1 ... Done
++ Copy certificate to remote host
++ Init instance configs
+ - Generate config pd -> 10.42.0.1:2379 ... Done
+ - Generate config tikv -> 10.42.0.1:20160 ... Done
+ - Generate config tidb -> 10.42.0.1:4000 ... Done
+ - Generate config tiflash -> 10.42.0.1:9000 ... Done
+ - Generate config prometheus -> 10.42.0.1:9090 ... Done
+ - Generate config grafana -> 10.42.0.1:3000 ... Done
+ - Generate config alertmanager -> 10.42.0.1:9093 ... Done
++ Init monitor configs
+ - Generate config node_exporter -> 10.42.0.1 ... Done
+ - Generate config blackbox_exporter -> 10.42.0.1 ... Done
+Enabling component pd
+ Enabling instance 10.42.0.1:2379
+ Enable instance 10.42.0.1:2379 success
+Enabling component tikv
+ Enabling instance 10.42.0.1:20160
+ Enable instance 10.42.0.1:20160 success
+Enabling component tidb
+ Enabling instance 10.42.0.1:4000
+ Enable instance 10.42.0.1:4000 success
+Enabling component tiflash
+ Enabling instance 10.42.0.1:9000
+ Enable instance 10.42.0.1:9000 success
+Enabling component prometheus
+ Enabling instance 10.42.0.1:9090
+ Enable instance 10.42.0.1:9090 success
+Enabling component grafana
+ Enabling instance 10.42.0.1:3000
+ Enable instance 10.42.0.1:3000 success
+Enabling component alertmanager
+ Enabling instance 10.42.0.1:9093
+ Enable instance 10.42.0.1:9093 success
+Enabling component node_exporter
+ Enabling instance 10.42.0.1
+ Enable 10.42.0.1 success
+Enabling component blackbox_exporter
+ Enabling instance 10.42.0.1
+ Enable 10.42.0.1 success
+Cluster `tidb-cluster` deployed successfully, you can start it with command: `tiup cluster start tidb-cluster --init`
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component tiflash
+ Starting instance 10.42.0.1:9000
+ Start instance 10.42.0.1:9000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+ Start instance 10.42.0.1:9093 success
+Starting component node_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
+Starting component blackbox_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
++ [ Serial ] - UpdateTopology: cluster=tidb-cluster
+Started cluster `tidb-cluster` successfully
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+Welcome to the MariaDB monitor. Commands end with ; or \g.
+Your MySQL connection id is 545259528
+Server version: 8.0.11-TiDB-v8.5.1 TiDB Server (Apache License 2.0) Community Edition, MySQL 8.0 compatible
+
+Copyright (c) 2000, 2018, Oracle, MariaDB Corporation Ab and others.
+
+Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
+
+MySQL [(none)]> show databases;
++--------------------+
+| Database |
++--------------------+
+| INFORMATION_SCHEMA |
+| METRICS_SCHEMA |
+| PERFORMANCE_SCHEMA |
+| mysql |
+| sys |
+| test |
++--------------------+
+6 rows in set (0.000 sec)
+
+MySQL [(none)]>
+MySQL [(none)]> create database tao_iot;
+
+
+^CCtrl-C -- query killed. Continuing normally.
+^CCtrl-C -- query killed. Continuing normally.
+^[
+^[
+
+^CCtrl-C -- exit!
+Aborted
+[root@localhost ~]# show databases;
+-bash: show:未找到命令
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+Welcome to the MariaDB monitor. Commands end with ; or \g.
+Your MySQL connection id is 545259542
+Server version: 8.0.11-TiDB-v8.5.1 TiDB Server (Apache License 2.0) Community Edition, MySQL 8.0 compatible
+
+Copyright (c) 2000, 2018, Oracle, MariaDB Corporation Ab and others.
+
+Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
+
+MySQL [(none)]> show databases;
++--------------------+
+| Database |
++--------------------+
+| INFORMATION_SCHEMA |
+| METRICS_SCHEMA |
+| PERFORMANCE_SCHEMA |
+| mysql |
+| sys |
+| tao_iot |
+| test |
++--------------------+
+7 rows in set (0.001 sec)
+
+MySQL [(none)]> exit;
+Bye
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+
+Error: Cluster name 'tidb-cluster' is duplicated (deploy.name_dup)
+
+Please specify another cluster name
+
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component tiflash
+ Starting instance 10.42.0.1:9000
+ Start instance 10.42.0.1:9000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+ Start instance 10.42.0.1:9093 success
+Starting component node_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
+Starting component blackbox_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
++ [ Serial ] - UpdateTopology: cluster=tidb-cluster
+Started cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tiflash
+ Stopping instance 10.42.0.1
+ Stop tiflash 10.42.0.1:9000 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component tiflash
+ Starting instance 10.42.0.1:9000
+ Start instance 10.42.0.1:9000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+ Start instance 10.42.0.1:9093 success
+Starting component node_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
+Starting component blackbox_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
++ [ Serial ] - UpdateTopology: cluster=tidb-cluster
+Started cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Up /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:9000 tiflash 10.42.0.1 9000/3930/20170/20292/8234/8123 linux/aarch64 Up /tidb-data/tiflash-9000 /tidb-deploy/tiflash-9000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 7
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tiflash
+ Stopping instance 10.42.0.1
+ Stop tiflash 10.42.0.1:9000 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+
+Error: Cluster name 'tidb-cluster' is duplicated (deploy.name_dup)
+
+Please specify another cluster name
+
+[root@localhost ~]# tiup cluster delete tidb-cluster
+
+Error: unknown command "delete" for "tiup"
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-24-14-35-47.log.
+[root@localhost ~]# tiup cluster destroy tidb-cluster
+
+ ██ ██ █████ ██████ ███ ██ ██ ███ ██ ██████
+ ██ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██ ██
+ ██ █ ██ ███████ ██████ ██ ██ ██ ██ ██ ██ ██ ██ ███
+ ██ ███ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
+ ███ ███ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██████
+
+This operation will destroy tidb v8.5.1 cluster tidb-cluster and its data.
+Are you sure to continue?
+(Type "Yes, I know my cluster and data will be deleted." to continue)
+: Yes,I know my cluster and data will be deleted.
+
+Error: Operation aborted by user (with incorrect answer 'Yes,I know my cluster and data will be deleted.') (tui.operation_aborted)
+[root@localhost ~]# tiup cluster destroy tidb-cluster
+
+ ██ ██ █████ ██████ ███ ██ ██ ███ ██ ██████
+ ██ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██ ██
+ ██ █ ██ ███████ ██████ ██ ██ ██ ██ ██ ██ ██ ██ ███
+ ██ ███ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
+ ███ ███ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██████
+
+This operation will destroy tidb v8.5.1 cluster tidb-cluster and its data.
+Are you sure to continue?
+(Type "Yes, I know my cluster and data will be deleted." to continue)
+: Yes, I know my cluster and data will be deleted.
+Destroying cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tiflash
+ Stopping instance 10.42.0.1
+ Stop tiflash 10.42.0.1:9000 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
++ [ Serial ] - DestroyCluster
+Destroying component alertmanager
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy alertmanager paths: [/tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093/log /tidb-deploy/alertmanager-9093 /etc/systemd/system/alertmanager-9093.service]
+Destroying component grafana
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy grafana paths: [/tidb-deploy/grafana-3000 /etc/systemd/system/grafana-3000.service]
+Destroying component prometheus
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy prometheus paths: [/etc/systemd/system/prometheus-9090.service /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090/log /tidb-deploy/prometheus-9090]
+Destroying component tiflash
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy tiflash paths: [/tidb-data/tiflash-9000 /tidb-deploy/tiflash-9000/log /tidb-deploy/tiflash-9000 /etc/systemd/system/tiflash-9000.service]
+Destroying component tidb
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy tidb paths: [/tidb-deploy/tidb-4000 /etc/systemd/system/tidb-4000.service /tidb-deploy/tidb-4000/log]
+Destroying component tikv
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy tikv paths: [/tidb-data/tikv-20160 /tidb-deploy/tikv-20160/log /tidb-deploy/tikv-20160 /etc/systemd/system/tikv-20160.service]
+Destroying component pd
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy pd paths: [/tidb-data/pd-2379 /tidb-deploy/pd-2379/log /tidb-deploy/pd-2379 /etc/systemd/system/pd-2379.service]
+Destroying monitored 10.42.0.1
+ Destroying instance 10.42.0.1
+Destroy monitored on 10.42.0.1 success
+Clean global directories 10.42.0.1
+ Clean directory /tidb-deploy on instance 10.42.0.1
+ Clean directory /tidb-data on instance 10.42.0.1
+Clean global directories 10.42.0.1 success
+Delete public key 10.42.0.1
+Delete public key 10.42.0.1 success
+Destroyed cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+
+Error: Failed to parse topology file topology.yaml (topology.parse_failed)
+ caused by: yaml: line 59: did not find expected key
+
+Please check the syntax of your topology file topology.yaml and try again.
+
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+
+Error: Failed to parse topology file topology.yaml (topology.parse_failed)
+ caused by: yaml: line 58: did not find expected key
+
+Please check the syntax of your topology file topology.yaml and try again.
+
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+
+Error: Failed to parse topology file topology.yaml (topology.parse_failed)
+ caused by: yaml: line 58: did not find expected key
+
+Please check the syntax of your topology file topology.yaml and try again.
+
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+
+Error: Failed to parse topology file topology.yaml (topology.parse_failed)
+ caused by: yaml: line 58: did not find expected key
+
+Please check the syntax of your topology file topology.yaml and try again.
+
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
++ Detect CPU Arch Name
+ - Detecting node 10.42.0.1 Arch info ... Error
+
+Error: failed to fetch cpu-arch or kernel-name: executor.ssh.execute_failed: Failed to execute command over SSH for 'root@10.42.0.1:22' {ssh_stderr: , ssh_stdout: , ssh_command: export LANG=C; PATH=$PATH:/bin:/sbin:/usr/bin:/usr/sbin; uname -m}, cause: ssh: handshake failed: ssh: unable to authenticate, attempted methods [none], no supported methods remain
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-24-14-57-18.log.
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
++ Detect CPU Arch Name
+ - Detecting node 10.42.0.1 Arch info ... Done
+
++ Detect CPU OS Name
+ - Detecting node 10.42.0.1 OS info ... Done
+Please confirm your topology:
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Role Host Ports OS/Arch Directories
+---- ---- ----- ------- -----------
+pd 10.42.0.1 2379/2380 linux/aarch64 /tidb-deploy/pd-2379,/tidb-data/pd-2379
+tikv 10.42.0.1 20160/20180 linux/aarch64 /tidb-deploy/tikv-20160,/tidb-data/tikv-20160
+tidb 10.42.0.1 4000/10080 linux/aarch64 /tidb-deploy/tidb-4000
+cdc 10.42.0.1 8300 linux/aarch64 /tidb-deploy/cdc-8300,/tidb-data/cdc-8300
+prometheus 10.42.0.1 9090/12020 linux/aarch64 /tidb-deploy/prometheus-9090,/tidb-data/prometheus-9090
+grafana 10.42.0.1 3000 linux/aarch64 /tidb-deploy/grafana-3000
+alertmanager 10.42.0.1 9093/9094 linux/aarch64 /tidb-deploy/alertmanager-9093,/tidb-data/alertmanager-9093
+Attention:
+ 1. If the topology is not what you expected, check your yaml file.
+ 2. Please confirm there is no port/directory conflicts in same host.
+Do you want to continue? [y/N]: (default=N) y
++ Generate SSH keys ... Done
++ Download TiDB components
+ - Download pd:v8.5.1 (linux/arm64) ... Done
+ - Download tikv:v8.5.1 (linux/arm64) ... Done
+ - Download tidb:v8.5.1 (linux/arm64) ... Done
+ - Download cdc:v8.5.1 (linux/arm64) ... Error
+ - Download prometheus:v8.5.1 (linux/arm64) ... Done
+ - Download grafana:v8.5.1 (linux/arm64) ... Done
+ - Download alertmanager: (linux/arm64) ... Done
+ - Download node_exporter: (linux/arm64) ... Done
+ - Download blackbox_exporter: (linux/arm64) ... Done
+
+Error: version v8.5.1 on linux/arm64 for component cdc not found: unknown version
+
+Verbose debug logs has been written to /root/.tiup/logs/tiup-cluster-debug-2025-04-24-14-57-41.log.
+[root@localhost ~]# vi /root/topology.yaml
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
++ Detect CPU Arch Name
+ - Detecting node 10.42.0.1 Arch info ... Done
+
++ Detect CPU OS Name
+ - Detecting node 10.42.0.1 OS info ... Done
+Please confirm your topology:
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Role Host Ports OS/Arch Directories
+---- ---- ----- ------- -----------
+pd 10.42.0.1 2379/2380 linux/aarch64 /tidb-deploy/pd-2379,/tidb-data/pd-2379
+tikv 10.42.0.1 20160/20180 linux/aarch64 /tidb-deploy/tikv-20160,/tidb-data/tikv-20160
+tidb 10.42.0.1 4000/10080 linux/aarch64 /tidb-deploy/tidb-4000
+prometheus 10.42.0.1 9090/12020 linux/aarch64 /tidb-deploy/prometheus-9090,/tidb-data/prometheus-9090
+grafana 10.42.0.1 3000 linux/aarch64 /tidb-deploy/grafana-3000
+alertmanager 10.42.0.1 9093/9094 linux/aarch64 /tidb-deploy/alertmanager-9093,/tidb-data/alertmanager-9093
+Attention:
+ 1. If the topology is not what you expected, check your yaml file.
+ 2. Please confirm there is no port/directory conflicts in same host.
+Do you want to continue? [y/N]: (default=N) y
++ Generate SSH keys ... Done
++ Download TiDB components
+ - Download pd:v8.5.1 (linux/arm64) ... Done
+ - Download tikv:v8.5.1 (linux/arm64) ... Done
+ - Download tidb:v8.5.1 (linux/arm64) ... Done
+ - Download prometheus:v8.5.1 (linux/arm64) ... Done
+ - Download grafana:v8.5.1 (linux/arm64) ... Done
+ - Download alertmanager: (linux/arm64) ... Done
+ - Download node_exporter: (linux/arm64) ... Done
+ - Download blackbox_exporter: (linux/arm64) ... Done
++ Initialize target host environments
+ - Prepare 10.42.0.1:22 ... Done
++ Deploy TiDB instance
+ - Copy pd -> 10.42.0.1 ... Done
+ - Copy tikv -> 10.42.0.1 ... Done
+ - Copy tidb -> 10.42.0.1 ... Done
+ - Copy prometheus -> 10.42.0.1 ... Done
+ - Copy grafana -> 10.42.0.1 ... Done
+ - Copy alertmanager -> 10.42.0.1 ... Done
+ - Deploy node_exporter -> 10.42.0.1 ... Done
+ - Deploy blackbox_exporter -> 10.42.0.1 ... Done
++ Copy certificate to remote host
++ Init instance configs
+ - Generate config pd -> 10.42.0.1:2379 ... Done
+ - Generate config tikv -> 10.42.0.1:20160 ... Done
+ - Generate config tidb -> 10.42.0.1:4000 ... Done
+ - Generate config prometheus -> 10.42.0.1:9090 ... Done
+ - Generate config grafana -> 10.42.0.1:3000 ... Done
+ - Generate config alertmanager -> 10.42.0.1:9093 ... Done
++ Init monitor configs
+ - Generate config node_exporter -> 10.42.0.1 ... Done
+ - Generate config blackbox_exporter -> 10.42.0.1 ... Done
+Enabling component pd
+ Enabling instance 10.42.0.1:2379
+ Enable instance 10.42.0.1:2379 success
+Enabling component tikv
+ Enabling instance 10.42.0.1:20160
+ Enable instance 10.42.0.1:20160 success
+Enabling component tidb
+ Enabling instance 10.42.0.1:4000
+ Enable instance 10.42.0.1:4000 success
+Enabling component prometheus
+ Enabling instance 10.42.0.1:9090
+ Enable instance 10.42.0.1:9090 success
+Enabling component grafana
+ Enabling instance 10.42.0.1:3000
+ Enable instance 10.42.0.1:3000 success
+Enabling component alertmanager
+ Enabling instance 10.42.0.1:9093
+ Enable instance 10.42.0.1:9093 success
+Enabling component node_exporter
+ Enabling instance 10.42.0.1
+ Enable 10.42.0.1 success
+Enabling component blackbox_exporter
+ Enabling instance 10.42.0.1
+ Enable 10.42.0.1 success
+Cluster `tidb-cluster` deployed successfully, you can start it with command: `tiup cluster start tidb-cluster --init`
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster destroy tidb-cluster
+
+ ██ ██ █████ ██████ ███ ██ ██ ███ ██ ██████
+ ██ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██ ██
+ ██ █ ██ ███████ ██████ ██ ██ ██ ██ ██ ██ ██ ██ ███
+ ██ ███ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
+ ███ ███ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██████
+
+This operation will destroy tidb v8.5.1 cluster tidb-cluster and its data.
+Are you sure to continue?
+(Type "Yes, I know my cluster and data will be deleted." to continue)
+: Yes, I know my cluster and data will be deleted.
+Destroying cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
++ [ Serial ] - DestroyCluster
+Destroying component alertmanager
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy alertmanager paths: [/tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093/log /tidb-deploy/alertmanager-9093 /etc/systemd/system/alertmanager-9093.service]
+Destroying component grafana
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy grafana paths: [/tidb-deploy/grafana-3000 /etc/systemd/system/grafana-3000.service]
+Destroying component prometheus
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy prometheus paths: [/tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090/log /tidb-deploy/prometheus-9090 /etc/systemd/system/prometheus-9090.service]
+Destroying component tidb
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy tidb paths: [/tidb-deploy/tidb-4000/log /tidb-deploy/tidb-4000 /etc/systemd/system/tidb-4000.service]
+Destroying component tikv
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy tikv paths: [/tidb-data/tikv-20160 /tidb-deploy/tikv-20160/log /tidb-deploy/tikv-20160 /etc/systemd/system/tikv-20160.service]
+Destroying component pd
+ Destroying instance 10.42.0.1
+Destroy 10.42.0.1 finished
+- Destroy pd paths: [/etc/systemd/system/pd-2379.service /tidb-data/pd-2379 /tidb-deploy/pd-2379/log /tidb-deploy/pd-2379]
+Destroying monitored 10.42.0.1
+ Destroying instance 10.42.0.1
+Destroy monitored on 10.42.0.1 success
+Clean global directories 10.42.0.1
+ Clean directory /tidb-deploy on instance 10.42.0.1
+ Clean directory /tidb-data on instance 10.42.0.1
+Clean global directories 10.42.0.1 success
+Delete public key 10.42.0.1
+Delete public key 10.42.0.1 success
+Destroyed cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster deploy tidb-cluster v8.5.1 topology.yaml --user root -p
+Input SSH password:
+
++ Detect CPU Arch Name
+ - Detecting node 10.42.0.1 Arch info ... Done
+
++ Detect CPU OS Name
+ - Detecting node 10.42.0.1 OS info ... Done
+Please confirm your topology:
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Role Host Ports OS/Arch Directories
+---- ---- ----- ------- -----------
+pd 10.42.0.1 2379/2380 linux/aarch64 /tidb-deploy/pd-2379,/tidb-data/pd-2379
+tikv 10.42.0.1 20160/20180 linux/aarch64 /tidb-deploy/tikv-20160,/tidb-data/tikv-20160
+tidb 10.42.0.1 4000/10080 linux/aarch64 /tidb-deploy/tidb-4000
+prometheus 10.42.0.1 9090/12020 linux/aarch64 /tidb-deploy/prometheus-9090,/tidb-data/prometheus-9090
+grafana 10.42.0.1 3000 linux/aarch64 /tidb-deploy/grafana-3000
+alertmanager 10.42.0.1 9093/9094 linux/aarch64 /tidb-deploy/alertmanager-9093,/tidb-data/alertmanager-9093
+Attention:
+ 1. If the topology is not what you expected, check your yaml file.
+ 2. Please confirm there is no port/directory conflicts in same host.
+Do you want to continue? [y/N]: (default=N) y
++ Generate SSH keys ... Done
++ Download TiDB components
+ - Download pd:v8.5.1 (linux/arm64) ... Done
+ - Download tikv:v8.5.1 (linux/arm64) ... Done
+ - Download tidb:v8.5.1 (linux/arm64) ... Done
+ - Download prometheus:v8.5.1 (linux/arm64) ... Done
+ - Download grafana:v8.5.1 (linux/arm64) ... Done
+ - Download alertmanager: (linux/arm64) ... Done
+ - Download node_exporter: (linux/arm64) ... Done
+ - Download blackbox_exporter: (linux/arm64) ... Done
++ Initialize target host environments
+ - Prepare 10.42.0.1:22 ... Done
++ Deploy TiDB instance
+ - Copy pd -> 10.42.0.1 ... Done
+ - Copy tikv -> 10.42.0.1 ... Done
+ - Copy tidb -> 10.42.0.1 ... Done
+ - Copy prometheus -> 10.42.0.1 ... Done
+ - Copy grafana -> 10.42.0.1 ... Done
+ - Copy alertmanager -> 10.42.0.1 ... Done
+ - Deploy node_exporter -> 10.42.0.1 ... Done
+ - Deploy blackbox_exporter -> 10.42.0.1 ... Done
++ Copy certificate to remote host
++ Init instance configs
+ - Generate config pd -> 10.42.0.1:2379 ... Done
+ - Generate config tikv -> 10.42.0.1:20160 ... Done
+ - Generate config tidb -> 10.42.0.1:4000 ... Done
+ - Generate config prometheus -> 10.42.0.1:9090 ... Done
+ - Generate config grafana -> 10.42.0.1:3000 ... Done
+ - Generate config alertmanager -> 10.42.0.1:9093 ... Done
++ Init monitor configs
+ - Generate config node_exporter -> 10.42.0.1 ... Done
+ - Generate config blackbox_exporter -> 10.42.0.1 ... Done
+Enabling component pd
+ Enabling instance 10.42.0.1:2379
+ Enable instance 10.42.0.1:2379 success
+Enabling component tikv
+ Enabling instance 10.42.0.1:20160
+ Enable instance 10.42.0.1:20160 success
+Enabling component tidb
+ Enabling instance 10.42.0.1:4000
+ Enable instance 10.42.0.1:4000 success
+Enabling component prometheus
+ Enabling instance 10.42.0.1:9090
+ Enable instance 10.42.0.1:9090 success
+Enabling component grafana
+ Enabling instance 10.42.0.1:3000
+ Enable instance 10.42.0.1:3000 success
+Enabling component alertmanager
+ Enabling instance 10.42.0.1:9093
+ Enable instance 10.42.0.1:9093 success
+Enabling component node_exporter
+ Enabling instance 10.42.0.1
+ Enable 10.42.0.1 success
+Enabling component blackbox_exporter
+ Enabling instance 10.42.0.1
+ Enable 10.42.0.1 success
+Cluster `tidb-cluster` deployed successfully, you can start it with command: `tiup cluster start tidb-cluster --init`
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Down /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Down - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Down /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Down - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Down /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+ Start instance 10.42.0.1:9093 success
+Starting component node_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
+Starting component blackbox_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
++ [ Serial ] - UpdateTopology: cluster=tidb-cluster
+Started cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Up /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+Welcome to the MariaDB monitor. Commands end with ; or \g.
+Your MySQL connection id is 1174405126
+Server version: 8.0.11-TiDB-v8.5.1 TiDB Server (Apache License 2.0) Community Edition, MySQL 8.0 compatible
+
+Copyright (c) 2000, 2018, Oracle, MariaDB Corporation Ab and others.
+
+Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
+
+MySQL [(none)]> show databases;
++--------------------+
+| Database |
++--------------------+
+| INFORMATION_SCHEMA |
+| METRICS_SCHEMA |
+| PERFORMANCE_SCHEMA |
+| mysql |
+| sys |
+| test |
++--------------------+
+6 rows in set (0.001 sec)
+
+MySQL [(none)]> create database tao_iot;
+show databases;
+^CCtrl-C -- query killed. Continuing normally.
+^CCtrl-C -- query killed. Continuing normally.
+^CCtrl-C -- exit!
+Aborted
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+Welcome to the MariaDB monitor. Commands end with ; or \g.
+Your MySQL connection id is 1174405132
+Server version: 8.0.11-TiDB-v8.5.1 TiDB Server (Apache License 2.0) Community Edition, MySQL 8.0 compatible
+
+Copyright (c) 2000, 2018, Oracle, MariaDB Corporation Ab and others.
+
+Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
+
+MySQL [(none)]> show databases;
++--------------------+
+| Database |
++--------------------+
+| INFORMATION_SCHEMA |
+| METRICS_SCHEMA |
+| PERFORMANCE_SCHEMA |
+| mysql |
+| sys |
+| tao_iot |
+| test |
++--------------------+
+7 rows in set (0.001 sec)
+
+MySQL [(none)]> exit;
+Bye
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N) y
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StopCluster
+Stopping component alertmanager
+ Stopping instance 10.42.0.1
+ Stop alertmanager 10.42.0.1:9093 success
+Stopping component grafana
+ Stopping instance 10.42.0.1
+ Stop grafana 10.42.0.1:3000 success
+Stopping component prometheus
+ Stopping instance 10.42.0.1
+ Stop prometheus 10.42.0.1:9090 success
+Stopping component tidb
+ Stopping instance 10.42.0.1
+ Stop tidb 10.42.0.1:4000 success
+Stopping component tikv
+ Stopping instance 10.42.0.1
+ Stop tikv 10.42.0.1:20160 success
+Stopping component pd
+ Stopping instance 10.42.0.1
+ Stop pd 10.42.0.1:2379 success
+Stopping component node_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopping component blackbox_exporter
+ Stopping instance 10.42.0.1
+ Stop 10.42.0.1 success
+Stopped cluster `tidb-cluster` successfully
+[root@localhost ~]# tiup cluster start tidb-cluster
+Starting cluster tidb-cluster...
++ [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa.pub
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [Parallel] - UserSSH: user=tidb, host=10.42.0.1
++ [ Serial ] - StartCluster
+Starting component pd
+ Starting instance 10.42.0.1:2379
+ Start instance 10.42.0.1:2379 success
+Starting component tikv
+ Starting instance 10.42.0.1:20160
+ Start instance 10.42.0.1:20160 success
+Starting component tidb
+ Starting instance 10.42.0.1:4000
+ Start instance 10.42.0.1:4000 success
+Starting component prometheus
+ Starting instance 10.42.0.1:9090
+ Start instance 10.42.0.1:9090 success
+Starting component grafana
+ Starting instance 10.42.0.1:3000
+ Start instance 10.42.0.1:3000 success
+Starting component alertmanager
+ Starting instance 10.42.0.1:9093
+ Start instance 10.42.0.1:9093 success
+Starting component node_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
+Starting component blackbox_exporter
+ Starting instance 10.42.0.1
+ Start 10.42.0.1 success
++ [ Serial ] - UpdateTopology: cluster=tidb-cluster
+Started cluster `tidb-cluster` successfully
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+Welcome to the MariaDB monitor. Commands end with ; or \g.
+Your MySQL connection id is 648019974
+Server version: 8.0.11-TiDB-v8.5.1 TiDB Server (Apache License 2.0) Community Edition, MySQL 8.0 compatible
+
+Copyright (c) 2000, 2018, Oracle, MariaDB Corporation Ab and others.
+
+Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
+
+MySQL [(none)]> show databses;
+ERROR 1064 (42000): You have an error in your SQL syntax; check the manual that corresponds to your TiDB version for the right syntax to use line 1 column 13 near "databses"
+MySQL [(none)]> show databases;
++--------------------+
+| Database |
++--------------------+
+| INFORMATION_SCHEMA |
+| METRICS_SCHEMA |
+| PERFORMANCE_SCHEMA |
+| mysql |
+| sys |
+| test |
++--------------------+
+6 rows in set (0.000 sec)
+
+MySQL [(none)]> exit;
+Bye
+[root@localhost ~]# tuip cluster display tidb-cluster
+-bash: tuip:未找到命令
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Up /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# tiup cluster list
+Name User Version Path PrivateKey
+---- ---- ------- ---- ----------
+tidb-cluster tidb v8.5.1 /root/.tiup/storage/cluster/clusters/tidb-cluster /root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa
+[root@localhost ~]#
+
+
+
+# 1. 停止 TiDB 集群
+tiup cluster stop tidb-cluster
+
+# 2. 在 /media/raid1 创建 TiDB 数据目录
+mkdir -p /media/raid1/tidb-data
+
+# 3. 备份现有数据(注意:通配符 * 不会匹配隐藏文件;如需完整复制可改用 cp -a /tidb-data/. /media/raid1/tidb-data/)
+cp -a /tidb-data/* /media/raid1/tidb-data/
+
+# 4. 将原目录改名备份,并创建指向新数据目录的符号链接
+mv /tidb-data /tidb-data.bak
+ln -s /media/raid1/tidb-data /tidb-data
+
+# 5. 设置适当的权限
+chown -R tidb:tidb /media/raid1/tidb-data
+chmod -R 750 /media/raid1/tidb-data
+
+# 6. 启动 TiDB 集群
+tiup cluster start tidb-cluster
+
+
+
+
+[root@localhost ~]# ls -la /tidb-data
+ls: 无法访问 '/tidb-data': 没有那个文件或目录
+[root@localhost ~]# /media/raid1/
+-bash: /media/raid1/:是一个目录
+[root@localhost ~]# find /media/raid1/ -name "tidb-data" -type d
+/media/raid1/tidb-data
+[root@localhost ~]# find /media/raid1/ -type d -maxdepth 1
+find: warning: you have specified the global option -maxdepth after the argument -type, but global options are not positional, i.e., -maxdepth affects tests specified before it as well as those specified after it. Please specify global options before other arguments.
+/media/raid1/
+/media/raid1/tidb-data
+[root@localhost ~]#
+[root@localhost ~]# tiup cluster stop tidb-cluster
+Will stop the cluster tidb-cluster with nodes: , roles: .
+Do you want to continue? [y/N]:(default=N)
+Error: Operation aborted by user (with answer '') (tui.operation_aborted)
+[root@localhost ~]# cat > /etc/systemd/system/tidb-cluster.service << 'EOF'
+> [Unit]
+> Description=TiDB集群服务
+> After=network.target
+>
+> [Service]
+> User=root
+> Type=forking
+> ExecStart=/bin/bash -c "source /root/.bash_profile && tiup cluster start tidb-cluster"
+> ExecStop=/bin/bash -c "source /root/.bash_profile && tiup cluster stop tidb-cluster"
+> Restart=on-failure
+> RestartSec=10
+>
+> [Install]
+> WantedBy=multi-user.target
+> EOF
+[root@localhost ~]# systemctl daemon-reload
+[root@localhost ~]# systemctl enable tidb-cluster.service
+Created symlink /etc/systemd/system/multi-user.target.wants/tidb-cluster.service → /etc/systemd/system/tidb-cluster.service.
+[root@localhost ~]# systemctl start tidb-cluster.service
+Job for tidb-cluster.service failed because a timeout was exceeded.
+See "systemctl status tidb-cluster.service" and "journalctl -xe" for details.
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p haiwei@123
+Enter password:
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p haiwei@123
+Enter password:
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]#
+[root@localhost ~]# cat > /etc/systemd/system/tidb-cluster.service << 'EOF'
+> [Unit]
+> Description=TiDB集群服务
+> After=network.target
+>
+> [Service]
+> User=root
+> Type=forking
+> ExecStart=/bin/bash -c "source /root/.bash_profile && tiup cluster start tidb-cluster -y"
+> ExecStop=/bin/bash -c "source /root/.bash_profile && tiup cluster stop tidb-cluster -y"
+> Restart=on-failure
+> RestartSec=10
+> TimeoutStartSec=600
+> TimeoutStopSec=600
+>
+> [Install]
+> WantedBy=multi-user.target
+> EOF
+[root@localhost ~]# ln -s /media/raid1/tidb-data /tidb-data
+[root@localhost ~]# chown -h tidb:tidb /tidb-data
+[root@localhost ~]# systemctl daemon-reload
+[root@localhost ~]# systemctl restart tidb-cluster.service
+Job for tidb-cluster.service failed because the control process exited with error code.
+See "systemctl status tidb-cluster.service" and "journalctl -xe" for details.
+[root@localhost ~]#
+
+
+>
+> [Service]
+> User=root
+> Type=forking
+> ExecStart=/bin/bash -c "source /root/.bash_profile && tiup cluster start tidb-cluster"
+> ExecStop=/bin/bash -c "source /root/.bash_profile && tiup cluster stop tidb-cluster"
+> Restart=on-failure
+> RestartSec=10
+>
+> [Install]
+> WantedBy=multi-user.target
+> EOF
+[root@localhost ~]# systemctl daemon-reload
+[root@localhost ~]# systemctl enable tidb-cluster.service
+Created symlink /etc/systemd/system/multi-user.target.wants/tidb-cluster.service → /etc/systemd/system/tidb-cluster.service.
+[root@localhost ~]# systemctl start tidb-cluster.service
+Job for tidb-cluster.service failed because a timeout was exceeded.
+See "systemctl status tidb-cluster.service" and "journalctl -xe" for details.
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p haiwei@123
+Enter password:
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p haiwei@123
+Enter password:
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# mysql -h 10.42.0.1 -P 4000 -u root -p
+Enter password:
+ERROR 2002 (HY000): Can't connect to MySQL server on '10.42.0.1' (115)
+[root@localhost ~]# cat > /etc/systemd/system/tidb-cluster.service << 'EOF'
+> [Unit]
+> Description=TiDB集群服务
+> After=network.target
+>
+> [Service]
+> User=root
+> Type=forking
+> ExecStart=/bin/bash -c "source /root/.bash_profile && tiup cluster start tidb-cluster -y"
+> ExecStop=/bin/bash -c "source /root/.bash_profile && tiup cluster stop tidb-cluster -y"
+> Restart=on-failure
+> RestartSec=10
+> TimeoutStartSec=600
+> TimeoutStopSec=600
+>
+> [Install]
+> WantedBy=multi-user.target
+> EOF
+[root@localhost ~]# ln -s /media/raid1/tidb-data /tidb-data
+[root@localhost ~]# chown -h tidb:tidb /tidb-data
+[root@localhost ~]# systemctl daemon-reload
+[root@localhost ~]# systemctl restart tidb-cluster.service
+Job for tidb-cluster.service failed because the control process exited with error code.
+See "systemctl status tidb-cluster.service" and "journalctl -xe" for details.
+[root@localhost ~]# ^C
+[root@localhost ~]# cat > /etc/systemd/system/tidb-cluster.service << 'EOF'
+> [Unit]
+> Description=TiDB集群服务
+> After=network-online.target
+> Wants=network-online.target
+>
+> [Service]
+> User=root
+> Type=oneshot
+> RemainAfterExit=yes
+> ExecStart=/bin/bash -c "source /root/.bash_profile && tiup cluster start tidb-cluster -y"
+> ExecStop=/bin/bash -c "source /root/.bash_profile && tiup cluster stop tidb-cluster -y"
+> Restart=on-failure
+> RestartSec=10
+> TimeoutStartSec=600
+> TimeoutStopSec=600
+>
+> [Install]
+> WantedBy=multi-user.target
+> EOF
+[root@localhost ~]# systemctl status tidb-cluster.service
+Warning: The unit file, source configuration file or drop-ins of tidb-cluster.service changed on disk. Run 'systemctl daemon-reload' to reload units.
+● tidb-cluster.service - TiDB集群服务
+ Loaded: loaded (/etc/systemd/system/tidb-cluster.service; enabled; vendor preset: disabled)
+ Active: activating (start) since Thu 2025-04-24 16:07:51 CST; 1min 31s ago
+Cntrl PID: 540413 (tiup)
+ Tasks: 30
+ Memory: 26.5M
+ CGroup: /system.slice/tidb-cluster.service
+ ├─540413 tiup cluster start tidb-cluster -y
+ └─540466 /root/.tiup/components/cluster/v1.16.1/tiup-cluster start tidb-cluster -y
+
+4月 24 16:07:52 localhost.localdomain bash[540466]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:07:52 localhost.localdomain bash[540466]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:07:52 localhost.localdomain bash[540466]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:07:52 localhost.localdomain bash[540466]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:07:52 localhost.localdomain bash[540466]: + [ Serial ] - StartCluster
+4月 24 16:07:52 localhost.localdomain bash[540466]: Starting component pd
+4月 24 16:07:52 localhost.localdomain bash[540466]: Starting instance 10.42.0.1:2379
+4月 24 16:07:53 localhost.localdomain bash[540466]: Start instance 10.42.0.1:2379 success
+4月 24 16:07:53 localhost.localdomain bash[540466]: Starting component tikv
+4月 24 16:07:53 localhost.localdomain bash[540466]: Starting instance 10.42.0.1:20160
+[root@localhost ~]# journalctl -u tidb-cluster.service
+-- Logs begin at Wed 2025-04-23 14:23:29 CST, end at Thu 2025-04-24 16:09:32 CST. --
+4月 24 16:01:12 localhost.localdomain systemd[1]: Starting TiDB集群服务...
+4月 24 16:01:12 localhost.localdomain bash[518232]: Starting cluster tidb-cluster...
+4月 24 16:01:12 localhost.localdomain bash[518232]: + [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-clu>4月 24 16:01:12 localhost.localdomain bash[518232]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:01:12 localhost.localdomain bash[518232]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:01:12 localhost.localdomain bash[518232]: + [ Serial ] - StartCluster
+4月 24 16:01:12 localhost.localdomain bash[518232]: Starting component pd
+4月 24 16:01:12 localhost.localdomain bash[518232]: Starting instance 10.42.0.1:2379
+4月 24 16:02:42 localhost.localdomain systemd[1]: tidb-cluster.service: start operation timed out. Terminating.
+4月 24 16:02:42 localhost.localdomain bash[518186]: Got signal terminated (Component: cluster ; PID: 518232)
+4月 24 16:02:42 localhost.localdomain systemd[1]: tidb-cluster.service: Control process exited, code=exited, status=255/EXCEPTION
+4月 24 16:02:42 localhost.localdomain systemd[1]: tidb-cluster.service: Failed with result 'timeout'.
+4月 24 16:02:42 localhost.localdomain systemd[1]: Failed to start TiDB集群服务.
+4月 24 16:02:52 localhost.localdomain systemd[1]: tidb-cluster.service: Service RestartSec=10s expired, scheduling restart.
+4月 24 16:02:52 localhost.localdomain systemd[1]: tidb-cluster.service: Scheduled restart job, restart counter is at 1.
+4月 24 16:02:52 localhost.localdomain systemd[1]: Stopped TiDB集群服务.
+4月 24 16:02:52 localhost.localdomain systemd[1]: Starting TiDB集群服务...
+4月 24 16:02:53 localhost.localdomain bash[522805]: Starting cluster tidb-cluster...
+4月 24 16:02:53 localhost.localdomain bash[522805]: + [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-clu>4月 24 16:02:53 localhost.localdomain bash[522805]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:02:53 localhost.localdomain bash[522805]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:02:53 localhost.localdomain bash[522805]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:02:53 localhost.localdomain bash[522805]: + [ Serial ] - StartCluster
+4月 24 16:02:53 localhost.localdomain bash[522805]: Starting component pd
+4月 24 16:02:53 localhost.localdomain bash[522805]: Starting instance 10.42.0.1:2379
+4月 24 16:04:22 localhost.localdomain systemd[1]: tidb-cluster.service: start operation timed out. Terminating.
+4月 24 16:04:22 localhost.localdomain bash[522756]: Got signal terminated (Component: cluster ; PID: 522805)
+4月 24 16:04:22 localhost.localdomain systemd[1]: tidb-cluster.service: Control process exited, code=exited, status=255/EXCEPTION
+4月 24 16:04:22 localhost.localdomain systemd[1]: tidb-cluster.service: Failed with result 'timeout'.
+4月 24 16:04:22 localhost.localdomain systemd[1]: Failed to start TiDB集群服务.
+4月 24 16:04:33 localhost.localdomain systemd[1]: tidb-cluster.service: Service RestartSec=10s expired, scheduling restart.
+4月 24 16:04:33 localhost.localdomain systemd[1]: tidb-cluster.service: Scheduled restart job, restart counter is at 2.
+4月 24 16:04:33 localhost.localdomain systemd[1]: Stopped TiDB集群服务.
+4月 24 16:04:33 localhost.localdomain systemd[1]: Starting TiDB集群服务...
+4月 24 16:04:33 localhost.localdomain bash[526964]: Starting cluster tidb-cluster...
+4月 24 16:04:33 localhost.localdomain bash[526964]: + [ Serial ] - SSHKeySet: privateKey=/root/.tiup/storage/cluster/clusters/tidb-cluster/ssh/id_rsa, publicKey=/root/.tiup/storage/cluster/clusters/tidb-clu>4月 24 16:04:33 localhost.localdomain bash[526964]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:04:33 localhost.localdomain bash[526964]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:04:33 localhost.localdomain bash[526964]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:04:33 localhost.localdomain bash[526964]: + [ Serial ] - StartCluster
+4月 24 16:04:33 localhost.localdomain bash[526964]: Starting component pd
+4月 24 16:04:33 localhost.localdomain bash[526964]: Starting instance 10.42.0.1:2379
+4月 24 16:05:24 localhost.localdomain bash[526964]: Start instance 10.42.0.1:2379 success
+4月 24 16:05:24 localhost.localdomain bash[526964]: Starting component tikv
+4月 24 16:05:24 localhost.localdomain bash[526964]: Starting instance 10.42.0.1:20160
+4月 24 16:05:33 localhost.localdomain bash[526917]: Got signal terminated (Component: cluster ; PID: 526964)
+[root@localhost ~]# ls -la /tidb-data.bak
+总用量 4
+drwxr-xr-x 7 tidb tidb 107 4月 24 15:06 .
+dr-xr-xr-x 23 root root 4096 4月 24 16:05 ..
+drwxr-xr-x 2 tidb tidb 35 4月 24 15:28 alertmanager-9093
+drwxr-xr-x 2 tidb tidb 6 4月 24 15:06 monitor-9100
+drwxr-xr-x 2 tidb tidb 6 4月 24 15:05 pd-2379
+drwxr-xr-x 3 tidb tidb 40 4月 24 15:10 prometheus-9090
+drwxr-xr-x 5 tidb tidb 152 4月 24 15:35 tikv-20160
+[root@localhost ~]# chown -R tidb:tidb /media/raid1/tidb-data
+[root@localhost ~]# chown -R tidb:tidb /media/raid1/tidb-data
+[root@localhost ~]# chmod -R 750 /media/raid1/tidb-data
+[root@localhost ~]# echo 'export PATH=$PATH:/usr/local/bin' >> /root/.bash_profile
+[root@localhost ~]# echo 'export TIUP_HOME=/root/.tiup' >> /root/.bash_profile
+[root@localhost ~]# source /root/.bash_profile
+[root@localhost ~]# cat > /etc/systemd/system/tidb-cluster.service << 'EOF'
+> [Unit]
+> Description=TiDB集群服务
+> After=network-online.target
+> Wants=network-online.target
+>
+> [Service]
+> User=root
+> Type=oneshot
+> RemainAfterExit=yes
+> ExecStart=/bin/bash -c "/root/.tiup/bin/tiup cluster start tidb-cluster -y"
+> ExecStop=/bin/bash -c "/root/.tiup/bin/tiup cluster stop tidb-cluster -y"
+> # 注意:Type=oneshot 的服务不允许 Restart= 取 no 以外的值,
+> # 否则 systemd 会以 "bad unit file setting" 拒绝加载(见下方日志),
+> # 因此这里不要配置 Restart=on-failure 和 RestartSec=10
+> TimeoutStartSec=600
+> TimeoutStopSec=600
+>
+> [Install]
+> WantedBy=multi-user.target
+> EOF
+[root@localhost ~]# cat > /etc/systemd/system/tidb-cluster.service << 'EOF'
+> [Unit]
+> Description=TiDB集群服务
+> After=network-online.target
+> Wants=network-online.target
+>
+> [Service]
+> User=root
+> Type=oneshot
+> RemainAfterExit=yes
+> ExecStart=/bin/bash -c "/root/.tiup/bin/tiup cluster start tidb-cluster -y"
+> ExecStop=/bin/bash -c "/root/.tiup/bin/tiup cluster stop tidb-cluster -y"
+> Restart=on-failure
+> RestartSec=10
+> TimeoutStartSec=600
+> TimeoutStopSec=600
+>
+> [Install]
+> WantedBy=multi-user.target
+> ^C
+[root@localhost ~]# systemctl daemon-reload
+[root@localhost ~]# systemctl restart tidb-cluster.service
+Failed to restart tidb-cluster.service: Unit tidb-cluster.service has a bad unit file setting.
+See system logs and 'systemctl status tidb-cluster.service' for details.
+[root@localhost ~]# systemctl status tidb-cluster.service
+● tidb-cluster.service - TiDB集群服务
+ Loaded: bad-setting (Reason: Unit tidb-cluster.service has a bad unit file setting.)
+ Active: activating (start) since Thu 2025-04-24 16:10:08 CST; 1min 25s ago
+Cntrl PID: 550277 (tiup)
+ Tasks: 29
+ Memory: 23.0M
+ CGroup: /system.slice/tidb-cluster.service
+ ├─550277 tiup cluster start tidb-cluster -y
+ └─550331 /root/.tiup/components/cluster/v1.16.1/tiup-cluster start tidb-cluster -y
+
+4月 24 16:10:09 localhost.localdomain bash[550331]: + [Parallel] - UserSSH: user=tidb, host=10.42.0.1
+4月 24 16:10:09 localhost.localdomain bash[550331]: + [ Serial ] - StartCluster
+4月 24 16:10:09 localhost.localdomain bash[550331]: Starting component pd
+4月 24 16:10:09 localhost.localdomain bash[550331]: Starting instance 10.42.0.1:2379
+4月 24 16:10:09 localhost.localdomain systemd[1]: tidb-cluster.service: Service has Restart= setting other than no, which isn't allowed for Type=oneshot services. Refusing.
+4月 24 16:10:10 localhost.localdomain bash[550331]: Start instance 10.42.0.1:2379 success
+4月 24 16:10:10 localhost.localdomain bash[550331]: Starting component tikv
+4月 24 16:10:10 localhost.localdomain bash[550331]: Starting instance 10.42.0.1:20160
+4月 24 16:10:10 localhost.localdomain systemd[1]: tidb-cluster.service: Service has Restart= setting other than no, which isn't allowed for Type=oneshot services. Refusing.
+4月 24 16:11:19 localhost.localdomain systemd[1]: tidb-cluster.service: Service has Restart= setting other than no, which isn't allowed for Type=oneshot services. Refusing.
+[root@localhost ~]#
+
+# 说明:上面 "bad unit file setting" 报错的原因是 Type=oneshot 服务
+# 不允许 Restart= 取 no 以外的值。解决方法:编辑
+# /etc/systemd/system/tidb-cluster.service,删除 Restart=on-failure 和
+# RestartSec=10 两行,然后执行 systemctl daemon-reload 再启动即可。
+
+
+ os: linux
+monitoring_servers:
+- host: 10.42.0.1
+ ssh_port: 22
+ port: 9090
+ ng_port: 12020
+ deploy_dir: /tidb-deploy/prometheus-9090
+ data_dir: /tidb-data/prometheus-9090
+ log_dir: /tidb-deploy/prometheus-9090/log
+ external_alertmanagers: []
+ arch: arm64
+ os: linux
+grafana_servers:
+- host: 10.42.0.1
+ ssh_port: 22
+ port: 3000
+ deploy_dir: /tidb-deploy/grafana-3000
+ arch: arm64
+ os: linux
+ username: admin
+ password: admin
+ anonymous_enable: false
+ root_url: ""
+ domain: ""
+alertmanager_servers:
+- host: 10.42.0.1
+ ssh_port: 22
+ web_port: 9093
+ cluster_port: 9094
+ deploy_dir: /tidb-deploy/alertmanager-9093
+ data_dir: /tidb-data/alertmanager-9093
+ log_dir: /tidb-deploy/alertmanager-9093/log
+ arch: arm64
+ os: linux
+
+# 1. 停止TiDB集群
+tiup cluster stop tidb-cluster
+
+# 2. 删除旧的套接字文件
+rm -f /tmp/tidb-4000.sock
+
+# 3. 确保/tmp目录权限正确
+chmod 1777 /tmp
+
+# 4. 启动TiDB集群
+tiup cluster start tidb-cluster
+
+
+[root@localhost ~]# sudo yum install redis
+上次元数据过期检查:0:04:06 前,执行于 2025年04月27日 星期日 10时37分10秒。
+依赖关系解决。
+========================================================================================================= Package Architecture Version Repository Size
+=========================================================================================================安装:
+ redis aarch64 7.2.7-1.p01.ky10 ks10-adv-updates 2.0 M
+
+事务概要
+=========================================================================================================安装 1 软件包
+
+总下载:2.0 M
+安装大小:7.5 M
+确定吗?[y/N]: y
+下载软件包:
+redis-7.2.7-1.p01.ky10.aarch64.rpm 2.6 MB/s | 2.0 MB 00:00
+---------------------------------------------------------------------------------------------------------总计 2.6 MB/s | 2.0 MB 00:00
+运行事务检查
+事务检查成功。
+运行事务测试
+事务测试成功。
+运行事务
+ 准备中 : 1/1
+ 运行脚本: redis-7.2.7-1.p01.ky10.aarch64 1/1
+ 安装 : redis-7.2.7-1.p01.ky10.aarch64 1/1
+ 运行脚本: redis-7.2.7-1.p01.ky10.aarch64 1/1
+ 验证 : redis-7.2.7-1.p01.ky10.aarch64 1/1
+
+已安装:
+ redis-7.2.7-1.p01.ky10.aarch64
+
+完毕!
+
+
+
+
+[root@localhost ~]# sudo apt-get install lsb-release curl gpg
+sudo: apt-get:找不到命令
+[root@localhost ~]# sudo vi /etc/yum.repos.d/redis.repo
+[root@localhost ~]# sudo vi /etc/yum.repos.d/redis.repo
+[root@localhost ~]# curl -fsSL https://packages.redis.io/gpg > /tmp/redis.key
+[root@localhost ~]# sudo rpm --import /tmp/redis.key
+[root@localhost ~]#
+
+Last login: Sun Apr 27 13:14:37 2025 from 10.42.0.12
+[root@localhost ~]# yum install gcc gcc-c++ make unzip pcre pcre-devel zlib zlib-devel libxml2 libxml2-devel readline readline-devel ncurses ncurses-devel perl-devel perl-ExtUtils-Embed openssl-devel -y
+上次元数据过期检查:1:45:51 前,执行于 2025年04月27日 星期日 11时48分51秒。
+软件包 gcc-7.3.0-20220207.45.p02.ky10.aarch64 已安装。
+软件包 gcc-c++-7.3.0-20220207.45.p02.ky10.aarch64 已安装。
+软件包 make-1:4.3-1.ky10.aarch64 已安装。
+软件包 unzip-6.0-47.ky10.aarch64 已安装。
+软件包 pcre-8.44-2.ky10.aarch64 已安装。
+软件包 pcre-devel-8.44-2.ky10.aarch64 已安装。
+软件包 zlib-1.2.11-18.ky10.aarch64 已安装。
+软件包 zlib-devel-1.2.11-18.ky10.aarch64 已安装。
+软件包 libxml2-2.9.10-25.ky10.aarch64 已安装。
+软件包 libxml2-devel-2.9.10-25.ky10.aarch64 已安装。
+软件包 readline-8.0-3.ky10.aarch64 已安装。
+软件包 ncurses-6.2-3.ky10.aarch64 已安装。
+软件包 perl-devel-4:5.28.3-7.se.01.ky10.aarch64 已安装。
+软件包 perl-4:5.28.3-7.se.01.ky10.aarch64 已安装。
+软件包 openssl-devel-1:1.1.1f-15.p05.ky10.aarch64 已安装。
+依赖关系解决。
+===================================================================================================== Package Architecture Version Repository Size
+=====================================================================================================安装:
+ ncurses-devel aarch64 6.2-6.ky10 ks10-adv-updates 659 k
+ readline-devel aarch64 8.0-3.ky10 ks10-adv-os 209 k
+升级:
+ libxml2 aarch64 2.9.10-43.p02.ky10 ks10-adv-updates 622 k
+ libxml2-devel aarch64 2.9.10-43.p02.ky10 ks10-adv-updates 1.7 M
+ minizip aarch64 1.2.11-23.ky10 ks10-adv-updates 29 k
+ ncurses aarch64 6.2-6.ky10 ks10-adv-updates 908 k
+ ncurses-base noarch 6.2-6.ky10 ks10-adv-updates 53 k
+ ncurses-libs aarch64 6.2-6.ky10 ks10-adv-updates 271 k
+ openssl aarch64 1:1.1.1f-15.p28.ky10 ks10-adv-updates 441 k
+ openssl-devel aarch64 1:1.1.1f-15.p28.ky10 ks10-adv-updates 1.8 M
+ openssl-help noarch 1:1.1.1f-15.p28.ky10 ks10-adv-updates 3.2 M
+ openssl-libs aarch64 1:1.1.1f-15.p28.ky10 ks10-adv-updates 1.3 M
+ openssl-perl aarch64 1:1.1.1f-15.p28.ky10 ks10-adv-updates 20 k
+ perl aarch64 4:5.28.3-7.se.01.p04.ky10 ks10-adv-updates 3.0 M
+ perl-devel aarch64 4:5.28.3-7.se.01.p04.ky10 ks10-adv-updates 1.9 M
+ perl-libs aarch64 4:5.28.3-7.se.01.p04.ky10 ks10-adv-updates 1.5 M
+ python3-libxml2 aarch64 2.9.10-43.p02.ky10 ks10-adv-updates 224 k
+ zlib aarch64 1.2.11-23.ky10 ks10-adv-updates 94 k
+ zlib-devel aarch64 1.2.11-23.ky10 ks10-adv-updates 98 k
+
+事务概要
+=====================================================================================================安装 2 软件包
+升级 17 软件包
+
+总下载:18 M
+下载软件包:
+(1/19): readline-devel-8.0-3.ky10.aarch64.rpm 538 kB/s | 209 kB 00:00
+(2/19): ncurses-devel-6.2-6.ky10.aarch64.rpm 1.3 MB/s | 659 kB 00:00
+(3/19): libxml2-2.9.10-43.p02.ky10.aarch64.rpm 1.0 MB/s | 622 kB 00:00
+(4/19): minizip-1.2.11-23.ky10.aarch64.rpm 247 kB/s | 29 kB 00:00
+(5/19): libxml2-devel-2.9.10-43.p02.ky10.aarch64.rpm 3.4 MB/s | 1.7 MB 00:00
+(6/19): ncurses-base-6.2-6.ky10.noarch.rpm 172 kB/s | 53 kB 00:00
+(7/19): ncurses-6.2-6.ky10.aarch64.rpm 2.1 MB/s | 908 kB 00:00
+(8/19): ncurses-libs-6.2-6.ky10.aarch64.rpm 1.4 MB/s | 271 kB 00:00
+(9/19): openssl-1.1.1f-15.p28.ky10.aarch64.rpm 809 kB/s | 441 kB 00:00
+(10/19): openssl-devel-1.1.1f-15.p28.ky10.aarch64.rpm 3.4 MB/s | 1.8 MB 00:00
+(11/19): openssl-perl-1.1.1f-15.p28.ky10.aarch64.rpm 179 kB/s | 20 kB 00:00
+(12/19): openssl-help-1.1.1f-15.p28.ky10.noarch.rpm 3.2 MB/s | 3.2 MB 00:00
+(13/19): openssl-libs-1.1.1f-15.p28.ky10.aarch64.rpm 1.8 MB/s | 1.3 MB 00:00
+(14/19): perl-devel-5.28.3-7.se.01.p04.ky10.aarch64.rpm 2.1 MB/s | 1.9 MB 00:00
+(15/19): perl-5.28.3-7.se.01.p04.ky10.aarch64.rpm 2.1 MB/s | 3.0 MB 00:01
+(16/19): perl-libs-5.28.3-7.se.01.p04.ky10.aarch64.rpm 1.4 MB/s | 1.5 MB 00:01
+(17/19): python3-libxml2-2.9.10-43.p02.ky10.aarch64.rpm 553 kB/s | 224 kB 00:00
+(18/19): zlib-1.2.11-23.ky10.aarch64.rpm 349 kB/s | 94 kB 00:00
+(19/19): zlib-devel-1.2.11-23.ky10.aarch64.rpm 643 kB/s | 98 kB 00:00
+-----------------------------------------------------------------------------------------------------总计 5.2 MB/s | 18 MB 00:03
+运行事务检查
+事务检查成功。
+运行事务测试
+事务测试成功。
+运行事务
+ 准备中 : 1/1
+ 升级 : zlib-1.2.11-23.ky10.aarch64 1/36
+ 升级 : libxml2-2.9.10-43.p02.ky10.aarch64 2/36
+ 运行脚本: libxml2-2.9.10-43.p02.ky10.aarch64 2/36
+ 升级 : openssl-libs-1:1.1.1f-15.p28.ky10.aarch64 3/36
+ 运行脚本: openssl-libs-1:1.1.1f-15.p28.ky10.aarch64 3/36
+ 升级 : zlib-devel-1.2.11-23.ky10.aarch64 4/36
+ 升级 : perl-libs-4:5.28.3-7.se.01.p04.ky10.aarch64 5/36
+ 升级 : perl-4:5.28.3-7.se.01.p04.ky10.aarch64 6/36
+ 升级 : ncurses-base-6.2-6.ky10.noarch 7/36
+ 升级 : ncurses-libs-6.2-6.ky10.aarch64 8/36
+ 升级 : ncurses-6.2-6.ky10.aarch64 9/36
+ 安装 : ncurses-devel-6.2-6.ky10.aarch64 10/36
+ 升级 : openssl-help-1:1.1.1f-15.p28.ky10.noarch 11/36
+ 升级 : openssl-1:1.1.1f-15.p28.ky10.aarch64 12/36
+ 升级 : openssl-perl-1:1.1.1f-15.p28.ky10.aarch64 13/36
+ 安装 : readline-devel-8.0-3.ky10.aarch64 14/36
+ 升级 : perl-devel-4:5.28.3-7.se.01.p04.ky10.aarch64 15/36
+ 升级 : libxml2-devel-2.9.10-43.p02.ky10.aarch64 16/36
+ 升级 : openssl-devel-1:1.1.1f-15.p28.ky10.aarch64 17/36
+ 升级 : python3-libxml2-2.9.10-43.p02.ky10.aarch64 18/36
+ 升级 : minizip-1.2.11-23.ky10.aarch64 19/36
+ 清理 : openssl-devel-1:1.1.1f-15.p05.ky10.aarch64 20/36
+ 清理 : openssl-perl-1:1.1.1f-15.p05.ky10.aarch64 21/36
+ 清理 : libxml2-devel-2.9.10-25.ky10.aarch64 22/36
+ 清理 : python3-libxml2-2.9.10-25.ky10.aarch64 23/36
+ 清理 : ncurses-6.2-3.ky10.aarch64 24/36
+ 清理 : perl-devel-4:5.28.3-7.se.01.ky10.aarch64 25/36
+ 清理 : perl-4:5.28.3-7.se.01.ky10.aarch64 26/36
+ 清理 : openssl-1:1.1.1f-15.p05.ky10.aarch64 27/36
+ 清理 : libxml2-2.9.10-25.ky10.aarch64 28/36
+ 运行脚本: libxml2-2.9.10-25.ky10.aarch64 28/36
+ 清理 : minizip-1.2.11-18.ky10.aarch64 29/36
+ 清理 : zlib-devel-1.2.11-18.ky10.aarch64 30/36
+ 清理 : openssl-libs-1:1.1.1f-15.p05.ky10.aarch64 31/36
+ 运行脚本: openssl-libs-1:1.1.1f-15.p05.ky10.aarch64 31/36
+ 清理 : ncurses-libs-6.2-3.ky10.aarch64 32/36
+ 清理 : ncurses-base-6.2-3.ky10.noarch 33/36
+ 清理 : openssl-help-1:1.1.1f-15.p05.ky10.noarch 34/36
+ 清理 : zlib-1.2.11-18.ky10.aarch64 35/36
+ 清理 : perl-libs-4:5.28.3-7.se.01.ky10.aarch64 36/36
+ 运行脚本: perl-libs-4:5.28.3-7.se.01.ky10.aarch64 36/36
+ 验证 : readline-devel-8.0-3.ky10.aarch64 1/36
+ 验证 : ncurses-devel-6.2-6.ky10.aarch64 2/36
+ 验证 : libxml2-2.9.10-43.p02.ky10.aarch64 3/36
+ 验证 : libxml2-2.9.10-25.ky10.aarch64 4/36
+ 验证 : libxml2-devel-2.9.10-43.p02.ky10.aarch64 5/36
+ 验证 : libxml2-devel-2.9.10-25.ky10.aarch64 6/36
+ 验证 : minizip-1.2.11-23.ky10.aarch64 7/36
+ 验证 : minizip-1.2.11-18.ky10.aarch64 8/36
+ 验证 : ncurses-6.2-6.ky10.aarch64 9/36
+ 验证 : ncurses-6.2-3.ky10.aarch64 10/36
+ 验证 : ncurses-base-6.2-6.ky10.noarch 11/36
+ 验证 : ncurses-base-6.2-3.ky10.noarch 12/36
+ 验证 : ncurses-libs-6.2-6.ky10.aarch64 13/36
+ 验证 : ncurses-libs-6.2-3.ky10.aarch64 14/36
+ 验证 : openssl-1:1.1.1f-15.p28.ky10.aarch64 15/36
+ 验证 : openssl-1:1.1.1f-15.p05.ky10.aarch64 16/36
+ 验证 : openssl-devel-1:1.1.1f-15.p28.ky10.aarch64 17/36
+ 验证 : openssl-devel-1:1.1.1f-15.p05.ky10.aarch64 18/36
+ 验证 : openssl-help-1:1.1.1f-15.p28.ky10.noarch 19/36
+ 验证 : openssl-help-1:1.1.1f-15.p05.ky10.noarch 20/36
+ 验证 : openssl-libs-1:1.1.1f-15.p28.ky10.aarch64 21/36
+ 验证 : openssl-libs-1:1.1.1f-15.p05.ky10.aarch64 22/36
+ 验证 : openssl-perl-1:1.1.1f-15.p28.ky10.aarch64 23/36
+ 验证 : openssl-perl-1:1.1.1f-15.p05.ky10.aarch64 24/36
+ 验证 : perl-4:5.28.3-7.se.01.p04.ky10.aarch64 25/36
+ 验证 : perl-4:5.28.3-7.se.01.ky10.aarch64 26/36
+ 验证 : perl-devel-4:5.28.3-7.se.01.p04.ky10.aarch64 27/36
+ 验证 : perl-devel-4:5.28.3-7.se.01.ky10.aarch64 28/36
+ 验证 : perl-libs-4:5.28.3-7.se.01.p04.ky10.aarch64 29/36
+ 验证 : perl-libs-4:5.28.3-7.se.01.ky10.aarch64 30/36
+ 验证 : python3-libxml2-2.9.10-43.p02.ky10.aarch64 31/36
+ 验证 : python3-libxml2-2.9.10-25.ky10.aarch64 32/36
+ 验证 : zlib-1.2.11-23.ky10.aarch64 33/36
+ 验证 : zlib-1.2.11-18.ky10.aarch64 34/36
+ 验证 : zlib-devel-1.2.11-23.ky10.aarch64 35/36
+ 验证 : zlib-devel-1.2.11-18.ky10.aarch64 36/36
+
+已升级:
+ libxml2-2.9.10-43.p02.ky10.aarch64 libxml2-devel-2.9.10-43.p02.ky10.aarch64
+ minizip-1.2.11-23.ky10.aarch64 ncurses-6.2-6.ky10.aarch64
+ ncurses-base-6.2-6.ky10.noarch ncurses-libs-6.2-6.ky10.aarch64
+ openssl-1:1.1.1f-15.p28.ky10.aarch64 openssl-devel-1:1.1.1f-15.p28.ky10.aarch64
+ openssl-help-1:1.1.1f-15.p28.ky10.noarch openssl-libs-1:1.1.1f-15.p28.ky10.aarch64
+ openssl-perl-1:1.1.1f-15.p28.ky10.aarch64 perl-4:5.28.3-7.se.01.p04.ky10.aarch64
+ perl-devel-4:5.28.3-7.se.01.p04.ky10.aarch64 perl-libs-4:5.28.3-7.se.01.p04.ky10.aarch64
+ python3-libxml2-2.9.10-43.p02.ky10.aarch64 zlib-1.2.11-23.ky10.aarch64
+ zlib-devel-1.2.11-23.ky10.aarch64
+
+已安装:
+ ncurses-devel-6.2-6.ky10.aarch64 readline-devel-8.0-3.ky10.aarch64
+
+完毕!
+[root@localhost ~]# wget -c http://nginx.org/download/nginx-1.28.0.tar.gz
+--2025-04-27 13:36:34-- http://nginx.org/download/nginx-1.28.0.tar.gz
+正在解析主机 nginx.org (nginx.org)... 52.58.199.22, 3.125.197.172, 2a05:d014:5c0:2601::6, ...
+正在连接 nginx.org (nginx.org)|52.58.199.22|:80... 已连接。
+已发出 HTTP 请求,正在等待回应... 200 OK
+长度:1280111 (1.2M) [application/octet-stream]
+正在保存至: “nginx-1.28.0.tar.gz”
+
+nginx-1.28.0.tar.gz 100%[==================================>] 1.22M 80.3KB/s 用时 21s
+
+2025-04-27 13:36:55 (59.8 KB/s) - 已保存 “nginx-1.28.0.tar.gz” [1280111/1280111])
+
+[root@localhost ~]# mv nginx-1.28.0.tar.gz /media/nginx
+[root@localhost ~]# cd /media/nginx
+-bash: cd: /media/nginx: 不是目录
+[root@localhost ~]# cd /media
+[root@localhost media]# mkdir -p /media/nginx
+mkdir: 无法创建目录 “/media/nginx”: 文件已存在
+[root@localhost media]# mkdir -p /media/nginx-server
+[root@localhost media]# mv nginx-1.28.0.tar.gz /media/nginx-server
+mv: 无法获取'nginx-1.28.0.tar.gz' 的文件状态(stat): 没有那个文件或目录
+[root@localhost media]# mv nginx /media/nginx-server
+[root@localhost media]# cd /media/nginx-server
+[root@localhost nginx-server]# tar -zxvf nginx.tar.gz
+tar (child): nginx.tar.gz:无法 open: 没有那个文件或目录
+tar (child): Error is not recoverable: exiting now
+tar: Child returned status 2
+tar: Error is not recoverable: exiting now
+[root@localhost nginx-server]# wget -c http://nginx.org/download/nginx-1.28.0.tar.gz
+--2025-04-27 13:40:30-- http://nginx.org/download/nginx-1.28.0.tar.gz
+正在解析主机 nginx.org (nginx.org)... 3.125.197.172, 52.58.199.22, 2a05:d014:5c0:2601::6, ...
+正在连接 nginx.org (nginx.org)|3.125.197.172|:80... 已连接。
+已发出 HTTP 请求,正在等待回应... 200 OK
+长度:1280111 (1.2M) [application/octet-stream]
+正在保存至: “nginx-1.28.0.tar.gz”
+
+nginx-1.28.0.tar.gz 100%[==================================>] 1.22M 427KB/s 用时 2.9s
+
+2025-04-27 13:40:34 (427 KB/s) - 已保存 “nginx-1.28.0.tar.gz” [1280111/1280111])
+
+[root@localhost nginx-server]# tar -zxvf nginx-1.28.o.tar.gz
+tar (child): nginx-1.28.o.tar.gz:无法 open: 没有那个文件或目录
+tar (child): Error is not recoverable: exiting now
+tar: Child returned status 2
+tar: Error is not recoverable: exiting now
+[root@localhost nginx-server]# tar -zxvf nginx-1.28.0.tar.gz
+nginx-1.28.0/
+nginx-1.28.0/man/
+nginx-1.28.0/CODE_OF_CONDUCT.md
+nginx-1.28.0/LICENSE
+nginx-1.28.0/configure
+nginx-1.28.0/auto/
+nginx-1.28.0/CHANGES
+nginx-1.28.0/CHANGES.ru
+nginx-1.28.0/html/
+nginx-1.28.0/contrib/
+nginx-1.28.0/README.md
+nginx-1.28.0/CONTRIBUTING.md
+nginx-1.28.0/conf/
+nginx-1.28.0/SECURITY.md
+nginx-1.28.0/src/
+nginx-1.28.0/src/misc/
+nginx-1.28.0/src/core/
+nginx-1.28.0/src/mail/
+nginx-1.28.0/src/stream/
+nginx-1.28.0/src/http/
+nginx-1.28.0/src/os/
+nginx-1.28.0/src/event/
+nginx-1.28.0/src/event/ngx_event_connectex.c
+nginx-1.28.0/src/event/ngx_event_accept.c
+nginx-1.28.0/src/event/ngx_event_posted.h
+nginx-1.28.0/src/event/ngx_event_acceptex.c
+nginx-1.28.0/src/event/ngx_event_udp.c
+nginx-1.28.0/src/event/ngx_event_openssl_stapling.c
+nginx-1.28.0/src/event/ngx_event_connect.c
+nginx-1.28.0/src/event/ngx_event.h
+nginx-1.28.0/src/event/ngx_event_timer.h
+nginx-1.28.0/src/event/ngx_event_openssl.c
+nginx-1.28.0/src/event/ngx_event_pipe.c
+nginx-1.28.0/src/event/ngx_event_openssl_cache.c
+nginx-1.28.0/src/event/ngx_event_posted.c
+nginx-1.28.0/src/event/ngx_event.c
+nginx-1.28.0/src/event/ngx_event_openssl.h
+nginx-1.28.0/src/event/ngx_event_timer.c
+nginx-1.28.0/src/event/ngx_event_connect.h
+nginx-1.28.0/src/event/ngx_event_udp.h
+nginx-1.28.0/src/event/modules/
+nginx-1.28.0/src/event/quic/
+nginx-1.28.0/src/event/ngx_event_pipe.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_ssl.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_connid.c
+nginx-1.28.0/src/event/quic/ngx_event_quic.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_migration.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_ack.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_udp.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_protection.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_tokens.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_socket.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_openssl_compat.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_output.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_streams.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_bpf_code.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_transport.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_frames.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_connection.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_migration.c
+nginx-1.28.0/src/event/quic/ngx_event_quic.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_connid.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_ssl.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_protection.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_ack.c
+nginx-1.28.0/src/event/quic/bpf/
+nginx-1.28.0/src/event/quic/ngx_event_quic_frames.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_transport.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_openssl_compat.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_socket.h
+nginx-1.28.0/src/event/quic/ngx_event_quic_bpf.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_streams.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_output.c
+nginx-1.28.0/src/event/quic/ngx_event_quic_tokens.c
+nginx-1.28.0/src/event/quic/bpf/makefile
+nginx-1.28.0/src/event/quic/bpf/ngx_quic_reuseport_helper.c
+nginx-1.28.0/src/event/quic/bpf/bpfgen.sh
+nginx-1.28.0/src/event/modules/ngx_epoll_module.c
+nginx-1.28.0/src/event/modules/ngx_iocp_module.h
+nginx-1.28.0/src/event/modules/ngx_win32_poll_module.c
+nginx-1.28.0/src/event/modules/ngx_kqueue_module.c
+nginx-1.28.0/src/event/modules/ngx_win32_select_module.c
+nginx-1.28.0/src/event/modules/ngx_iocp_module.c
+nginx-1.28.0/src/event/modules/ngx_select_module.c
+nginx-1.28.0/src/event/modules/ngx_eventport_module.c
+nginx-1.28.0/src/event/modules/ngx_devpoll_module.c
+nginx-1.28.0/src/event/modules/ngx_poll_module.c
+nginx-1.28.0/src/os/unix/
+nginx-1.28.0/src/os/win32/
+nginx-1.28.0/src/os/win32/nginx.ico
+nginx-1.28.0/src/os/win32/ngx_time.c
+nginx-1.28.0/src/os/win32/ngx_wsarecv_chain.c
+nginx-1.28.0/src/os/win32/ngx_alloc.c
+nginx-1.28.0/src/os/win32/ngx_win32_init.c
+nginx-1.28.0/src/os/win32/nginx_icon16.xpm
+nginx-1.28.0/src/os/win32/ngx_dlopen.h
+nginx-1.28.0/src/os/win32/ngx_files.c
+nginx-1.28.0/src/os/win32/ngx_event_log.c
+nginx-1.28.0/src/os/win32/ngx_socket.c
+nginx-1.28.0/src/os/win32/ngx_win32_config.h
+nginx-1.28.0/src/os/win32/ngx_user.h
+nginx-1.28.0/src/os/win32/ngx_errno.c
+nginx-1.28.0/src/os/win32/ngx_process_cycle.h
+nginx-1.28.0/src/os/win32/ngx_wsasend.c
+nginx-1.28.0/src/os/win32/ngx_process.h
+nginx-1.28.0/src/os/win32/ngx_shmem.c
+nginx-1.28.0/src/os/win32/ngx_thread.c
+nginx-1.28.0/src/os/win32/ngx_stat.c
+nginx-1.28.0/src/os/win32/ngx_service.c
+nginx-1.28.0/src/os/win32/nginx_icon48.xpm
+nginx-1.28.0/src/os/win32/ngx_os.h
+nginx-1.28.0/src/os/win32/ngx_wsasend_chain.c
+nginx-1.28.0/src/os/win32/nginx.rc
+nginx-1.28.0/src/os/win32/ngx_dlopen.c
+nginx-1.28.0/src/os/win32/ngx_alloc.h
+nginx-1.28.0/src/os/win32/ngx_time.h
+nginx-1.28.0/src/os/win32/ngx_udp_wsarecv.c
+nginx-1.28.0/src/os/win32/ngx_errno.h
+nginx-1.28.0/src/os/win32/ngx_atomic.h
+nginx-1.28.0/src/os/win32/ngx_user.c
+nginx-1.28.0/src/os/win32/ngx_socket.h
+nginx-1.28.0/src/os/win32/ngx_files.h
+nginx-1.28.0/src/os/win32/ngx_thread.h
+nginx-1.28.0/src/os/win32/ngx_shmem.h
+nginx-1.28.0/src/os/win32/ngx_process.c
+nginx-1.28.0/src/os/win32/ngx_process_cycle.c
+nginx-1.28.0/src/os/win32/ngx_wsarecv.c
+nginx-1.28.0/src/os/win32/nginx_icon32.xpm
+nginx-1.28.0/src/os/unix/ngx_udp_send.c
+nginx-1.28.0/src/os/unix/ngx_freebsd_sendfile_chain.c
+nginx-1.28.0/src/os/unix/ngx_time.c
+nginx-1.28.0/src/os/unix/ngx_recv.c
+nginx-1.28.0/src/os/unix/ngx_solaris_sendfilev_chain.c
+nginx-1.28.0/src/os/unix/ngx_writev_chain.c
+nginx-1.28.0/src/os/unix/ngx_alloc.c
+nginx-1.28.0/src/os/unix/ngx_darwin_init.c
+nginx-1.28.0/src/os/unix/ngx_dlopen.h
+nginx-1.28.0/src/os/unix/ngx_linux.h
+nginx-1.28.0/src/os/unix/ngx_files.c
+nginx-1.28.0/src/os/unix/ngx_socket.c
+nginx-1.28.0/src/os/unix/ngx_readv_chain.c
+nginx-1.28.0/src/os/unix/ngx_thread_id.c
+nginx-1.28.0/src/os/unix/ngx_darwin_config.h
+nginx-1.28.0/src/os/unix/ngx_sunpro_atomic_sparc64.h
+nginx-1.28.0/src/os/unix/ngx_gcc_atomic_sparc64.h
+nginx-1.28.0/src/os/unix/ngx_user.h
+nginx-1.28.0/src/os/unix/ngx_errno.c
+nginx-1.28.0/src/os/unix/ngx_process_cycle.h
+nginx-1.28.0/src/os/unix/ngx_process.h
+nginx-1.28.0/src/os/unix/ngx_shmem.c
+nginx-1.28.0/src/os/unix/ngx_sunpro_amd64.il
+nginx-1.28.0/src/os/unix/ngx_freebsd.h
+nginx-1.28.0/src/os/unix/ngx_gcc_atomic_ppc.h
+nginx-1.28.0/src/os/unix/ngx_darwin_sendfile_chain.c
+nginx-1.28.0/src/os/unix/ngx_sunpro_x86.il
+nginx-1.28.0/src/os/unix/ngx_os.h
+nginx-1.28.0/src/os/unix/ngx_sunpro_sparc64.il
+nginx-1.28.0/src/os/unix/ngx_linux_config.h
+nginx-1.28.0/src/os/unix/ngx_setaffinity.h
+nginx-1.28.0/src/os/unix/ngx_linux_init.c
+nginx-1.28.0/src/os/unix/ngx_setproctitle.c
+nginx-1.28.0/src/os/unix/ngx_file_aio_read.c
+nginx-1.28.0/src/os/unix/ngx_channel.c
+nginx-1.28.0/src/os/unix/ngx_solaris_config.h
+nginx-1.28.0/src/os/unix/ngx_dlopen.c
+nginx-1.28.0/src/os/unix/ngx_gcc_atomic_x86.h
+nginx-1.28.0/src/os/unix/ngx_alloc.h
+nginx-1.28.0/src/os/unix/ngx_posix_init.c
+nginx-1.28.0/src/os/unix/ngx_time.h
+nginx-1.28.0/src/os/unix/ngx_posix_config.h
+nginx-1.28.0/src/os/unix/ngx_solaris_init.c
+nginx-1.28.0/src/os/unix/ngx_darwin.h
+nginx-1.28.0/src/os/unix/ngx_errno.h
+nginx-1.28.0/src/os/unix/ngx_udp_recv.c
+nginx-1.28.0/src/os/unix/ngx_atomic.h
+nginx-1.28.0/src/os/unix/ngx_user.c
+nginx-1.28.0/src/os/unix/ngx_send.c
+nginx-1.28.0/src/os/unix/ngx_socket.h
+nginx-1.28.0/src/os/unix/ngx_linux_sendfile_chain.c
+nginx-1.28.0/src/os/unix/ngx_solaris.h
+nginx-1.28.0/src/os/unix/ngx_files.h
+nginx-1.28.0/src/os/unix/ngx_setaffinity.c
+nginx-1.28.0/src/os/unix/ngx_linux_aio_read.c
+nginx-1.28.0/src/os/unix/ngx_thread.h
+nginx-1.28.0/src/os/unix/ngx_freebsd_config.h
+nginx-1.28.0/src/os/unix/ngx_shmem.h
+nginx-1.28.0/src/os/unix/ngx_process.c
+nginx-1.28.0/src/os/unix/ngx_process_cycle.c
+nginx-1.28.0/src/os/unix/ngx_thread_cond.c
+nginx-1.28.0/src/os/unix/ngx_channel.h
+nginx-1.28.0/src/os/unix/ngx_thread_mutex.c
+nginx-1.28.0/src/os/unix/ngx_setproctitle.h
+nginx-1.28.0/src/os/unix/ngx_daemon.c
+nginx-1.28.0/src/os/unix/ngx_gcc_atomic_amd64.h
+nginx-1.28.0/src/os/unix/ngx_freebsd_init.c
+nginx-1.28.0/src/os/unix/ngx_udp_sendmsg_chain.c
+nginx-1.28.0/src/http/ngx_http_header_filter_module.c
+nginx-1.28.0/src/http/ngx_http_huff_decode.c
+nginx-1.28.0/src/http/ngx_http_core_module.h
+nginx-1.28.0/src/http/ngx_http_variables.h
+nginx-1.28.0/src/http/ngx_http_huff_encode.c
+nginx-1.28.0/src/http/ngx_http_special_response.c
+nginx-1.28.0/src/http/ngx_http.c
+nginx-1.28.0/src/http/ngx_http_write_filter_module.c
+nginx-1.28.0/src/http/ngx_http_upstream.h
+nginx-1.28.0/src/http/ngx_http_request_body.c
+nginx-1.28.0/src/http/ngx_http_postpone_filter_module.c
+nginx-1.28.0/src/http/ngx_http_script.c
+nginx-1.28.0/src/http/ngx_http_upstream_round_robin.c
+nginx-1.28.0/src/http/ngx_http_cache.h
+nginx-1.28.0/src/http/ngx_http_file_cache.c
+nginx-1.28.0/src/http/ngx_http_request.c
+nginx-1.28.0/src/http/v2/
+nginx-1.28.0/src/http/ngx_http_variables.c
+nginx-1.28.0/src/http/ngx_http_core_module.c
+nginx-1.28.0/src/http/v3/
+nginx-1.28.0/src/http/ngx_http_upstream.c
+nginx-1.28.0/src/http/ngx_http.h
+nginx-1.28.0/src/http/ngx_http_upstream_round_robin.h
+nginx-1.28.0/src/http/ngx_http_config.h
+nginx-1.28.0/src/http/ngx_http_script.h
+nginx-1.28.0/src/http/modules/
+nginx-1.28.0/src/http/ngx_http_parse.c
+nginx-1.28.0/src/http/ngx_http_copy_filter_module.c
+nginx-1.28.0/src/http/ngx_http_request.h
+nginx-1.28.0/src/http/modules/ngx_http_scgi_module.c
+nginx-1.28.0/src/http/modules/ngx_http_dav_module.c
+nginx-1.28.0/src/http/modules/ngx_http_log_module.c
+nginx-1.28.0/src/http/modules/ngx_http_gzip_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_range_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_ssl_module.c
+nginx-1.28.0/src/http/modules/ngx_http_geo_module.c
+nginx-1.28.0/src/http/modules/ngx_http_grpc_module.c
+nginx-1.28.0/src/http/modules/ngx_http_image_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_charset_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_upstream_random_module.c
+nginx-1.28.0/src/http/modules/ngx_http_realip_module.c
+nginx-1.28.0/src/http/modules/ngx_http_upstream_hash_module.c
+nginx-1.28.0/src/http/modules/ngx_http_secure_link_module.c
+nginx-1.28.0/src/http/modules/ngx_http_autoindex_module.c
+nginx-1.28.0/src/http/modules/ngx_http_memcached_module.c
+nginx-1.28.0/src/http/modules/ngx_http_ssi_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_flv_module.c
+nginx-1.28.0/src/http/modules/perl/
+nginx-1.28.0/src/http/modules/ngx_http_xslt_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_slice_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_sub_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_browser_module.c
+nginx-1.28.0/src/http/modules/ngx_http_not_modified_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_try_files_module.c
+nginx-1.28.0/src/http/modules/ngx_http_index_module.c
+nginx-1.28.0/src/http/modules/ngx_http_auth_basic_module.c
+nginx-1.28.0/src/http/modules/ngx_http_rewrite_module.c
+nginx-1.28.0/src/http/modules/ngx_http_ssl_module.h
+nginx-1.28.0/src/http/modules/ngx_http_referer_module.c
+nginx-1.28.0/src/http/modules/ngx_http_empty_gif_module.c
+nginx-1.28.0/src/http/modules/ngx_http_mirror_module.c
+nginx-1.28.0/src/http/modules/ngx_http_access_module.c
+nginx-1.28.0/src/http/modules/ngx_http_upstream_ip_hash_module.c
+nginx-1.28.0/src/http/modules/ngx_http_mp4_module.c
+nginx-1.28.0/src/http/modules/ngx_http_limit_req_module.c
+nginx-1.28.0/src/http/modules/ngx_http_limit_conn_module.c
+nginx-1.28.0/src/http/modules/ngx_http_addition_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_split_clients_module.c
+nginx-1.28.0/src/http/modules/ngx_http_auth_request_module.c
+nginx-1.28.0/src/http/modules/ngx_http_degradation_module.c
+nginx-1.28.0/src/http/modules/ngx_http_fastcgi_module.c
+nginx-1.28.0/src/http/modules/ngx_http_gzip_static_module.c
+nginx-1.28.0/src/http/modules/ngx_http_upstream_zone_module.c
+nginx-1.28.0/src/http/modules/ngx_http_geoip_module.c
+nginx-1.28.0/src/http/modules/ngx_http_ssi_filter_module.h
+nginx-1.28.0/src/http/modules/ngx_http_random_index_module.c
+nginx-1.28.0/src/http/modules/ngx_http_userid_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_headers_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_uwsgi_module.c
+nginx-1.28.0/src/http/modules/ngx_http_map_module.c
+nginx-1.28.0/src/http/modules/ngx_http_gunzip_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_proxy_module.c
+nginx-1.28.0/src/http/modules/ngx_http_upstream_least_conn_module.c
+nginx-1.28.0/src/http/modules/ngx_http_upstream_keepalive_module.c
+nginx-1.28.0/src/http/modules/ngx_http_static_module.c
+nginx-1.28.0/src/http/modules/ngx_http_chunked_filter_module.c
+nginx-1.28.0/src/http/modules/ngx_http_stub_status_module.c
+nginx-1.28.0/src/http/modules/perl/ngx_http_perl_module.c
+nginx-1.28.0/src/http/modules/perl/nginx.pm
+nginx-1.28.0/src/http/modules/perl/typemap
+nginx-1.28.0/src/http/modules/perl/Makefile.PL
+nginx-1.28.0/src/http/modules/perl/ngx_http_perl_module.h
+nginx-1.28.0/src/http/modules/perl/nginx.xs
+nginx-1.28.0/src/http/v3/ngx_http_v3.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_table.h
+nginx-1.28.0/src/http/v3/ngx_http_v3_uni.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_encode.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_module.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_parse.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_request.c
+nginx-1.28.0/src/http/v3/ngx_http_v3.h
+nginx-1.28.0/src/http/v3/ngx_http_v3_table.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_uni.h
+nginx-1.28.0/src/http/v3/ngx_http_v3_encode.h
+nginx-1.28.0/src/http/v3/ngx_http_v3_filter_module.c
+nginx-1.28.0/src/http/v3/ngx_http_v3_parse.h
+nginx-1.28.0/src/http/v2/ngx_http_v2.h
+nginx-1.28.0/src/http/v2/ngx_http_v2_encode.c
+nginx-1.28.0/src/http/v2/ngx_http_v2_module.c
+nginx-1.28.0/src/http/v2/ngx_http_v2_filter_module.c
+nginx-1.28.0/src/http/v2/ngx_http_v2_table.c
+nginx-1.28.0/src/http/v2/ngx_http_v2.c
+nginx-1.28.0/src/http/v2/ngx_http_v2_module.h
+nginx-1.28.0/src/stream/ngx_stream_limit_conn_module.c
+nginx-1.28.0/src/stream/ngx_stream_set_module.c
+nginx-1.28.0/src/stream/ngx_stream_access_module.c
+nginx-1.28.0/src/stream/ngx_stream_ssl_module.h
+nginx-1.28.0/src/stream/ngx_stream_upstream_random_module.c
+nginx-1.28.0/src/stream/ngx_stream_upstream_round_robin.h
+nginx-1.28.0/src/stream/ngx_stream_split_clients_module.c
+nginx-1.28.0/src/stream/ngx_stream_handler.c
+nginx-1.28.0/src/stream/ngx_stream_upstream_zone_module.c
+nginx-1.28.0/src/stream/ngx_stream.h
+nginx-1.28.0/src/stream/ngx_stream_variables.h
+nginx-1.28.0/src/stream/ngx_stream_upstream_least_conn_module.c
+nginx-1.28.0/src/stream/ngx_stream_pass_module.c
+nginx-1.28.0/src/stream/ngx_stream_core_module.c
+nginx-1.28.0/src/stream/ngx_stream_script.h
+nginx-1.28.0/src/stream/ngx_stream_upstream.h
+nginx-1.28.0/src/stream/ngx_stream_return_module.c
+nginx-1.28.0/src/stream/ngx_stream_map_module.c
+nginx-1.28.0/src/stream/ngx_stream_geo_module.c
+nginx-1.28.0/src/stream/ngx_stream_realip_module.c
+nginx-1.28.0/src/stream/ngx_stream_ssl_preread_module.c
+nginx-1.28.0/src/stream/ngx_stream_log_module.c
+nginx-1.28.0/src/stream/ngx_stream_ssl_module.c
+nginx-1.28.0/src/stream/ngx_stream_variables.c
+nginx-1.28.0/src/stream/ngx_stream.c
+nginx-1.28.0/src/stream/ngx_stream_upstream_round_robin.c
+nginx-1.28.0/src/stream/ngx_stream_geoip_module.c
+nginx-1.28.0/src/stream/ngx_stream_upstream_hash_module.c
+nginx-1.28.0/src/stream/ngx_stream_proxy_module.c
+nginx-1.28.0/src/stream/ngx_stream_write_filter_module.c
+nginx-1.28.0/src/stream/ngx_stream_script.c
+nginx-1.28.0/src/stream/ngx_stream_upstream.c
+nginx-1.28.0/src/mail/ngx_mail_auth_http_module.c
+nginx-1.28.0/src/mail/ngx_mail_realip_module.c
+nginx-1.28.0/src/mail/ngx_mail_imap_handler.c
+nginx-1.28.0/src/mail/ngx_mail.h
+nginx-1.28.0/src/mail/ngx_mail_smtp_handler.c
+nginx-1.28.0/src/mail/ngx_mail_parse.c
+nginx-1.28.0/src/mail/ngx_mail_ssl_module.c
+nginx-1.28.0/src/mail/ngx_mail_imap_module.c
+nginx-1.28.0/src/mail/ngx_mail_pop3_module.c
+nginx-1.28.0/src/mail/ngx_mail_smtp_module.c
+nginx-1.28.0/src/mail/ngx_mail_core_module.c
+nginx-1.28.0/src/mail/ngx_mail_proxy_module.c
+nginx-1.28.0/src/mail/ngx_mail.c
+nginx-1.28.0/src/mail/ngx_mail_imap_module.h
+nginx-1.28.0/src/mail/ngx_mail_pop3_handler.c
+nginx-1.28.0/src/mail/ngx_mail_ssl_module.h
+nginx-1.28.0/src/mail/ngx_mail_handler.c
+nginx-1.28.0/src/mail/ngx_mail_smtp_module.h
+nginx-1.28.0/src/mail/ngx_mail_pop3_module.h
+nginx-1.28.0/src/core/ngx_rwlock.h
+nginx-1.28.0/src/core/ngx_thread_pool.c
+nginx-1.28.0/src/core/ngx_shmtx.h
+nginx-1.28.0/src/core/ngx_regex.h
+nginx-1.28.0/src/core/ngx_log.c
+nginx-1.28.0/src/core/ngx_queue.c
+nginx-1.28.0/src/core/ngx_cpuinfo.c
+nginx-1.28.0/src/core/ngx_cycle.h
+nginx-1.28.0/src/core/ngx_module.c
+nginx-1.28.0/src/core/ngx_crc32.c
+nginx-1.28.0/src/core/ngx_palloc.c
+nginx-1.28.0/src/core/ngx_list.h
+nginx-1.28.0/src/core/ngx_slab.h
+nginx-1.28.0/src/core/ngx_times.h
+nginx-1.28.0/src/core/ngx_radix_tree.c
+nginx-1.28.0/src/core/ngx_output_chain.c
+nginx-1.28.0/src/core/ngx_murmurhash.c
+nginx-1.28.0/src/core/ngx_syslog.h
+nginx-1.28.0/src/core/ngx_parse.h
+nginx-1.28.0/src/core/ngx_open_file_cache.c
+nginx-1.28.0/src/core/ngx_string.c
+nginx-1.28.0/src/core/ngx_crc.h
+nginx-1.28.0/src/core/nginx.h
+nginx-1.28.0/src/core/ngx_bpf.h
+nginx-1.28.0/src/core/ngx_proxy_protocol.h
+nginx-1.28.0/src/core/ngx_parse_time.h
+nginx-1.28.0/src/core/ngx_array.c
+nginx-1.28.0/src/core/ngx_inet.c
+nginx-1.28.0/src/core/ngx_sha1.h
+nginx-1.28.0/src/core/ngx_rbtree.c
+nginx-1.28.0/src/core/ngx_file.c
+nginx-1.28.0/src/core/ngx_md5.h
+nginx-1.28.0/src/core/ngx_connection.h
+nginx-1.28.0/src/core/ngx_resolver.c
+nginx-1.28.0/src/core/ngx_buf.h
+nginx-1.28.0/src/core/ngx_crypt.c
+nginx-1.28.0/src/core/ngx_config.h
+nginx-1.28.0/src/core/ngx_conf_file.c
+nginx-1.28.0/src/core/ngx_hash.h
+nginx-1.28.0/src/core/ngx_crc32.h
+nginx-1.28.0/src/core/ngx_slab.c
+nginx-1.28.0/src/core/ngx_list.c
+nginx-1.28.0/src/core/ngx_palloc.h
+nginx-1.28.0/src/core/ngx_module.h
+nginx-1.28.0/src/core/ngx_core.h
+nginx-1.28.0/src/core/ngx_queue.h
+nginx-1.28.0/src/core/ngx_log.h
+nginx-1.28.0/src/core/ngx_cycle.c
+nginx-1.28.0/src/core/ngx_shmtx.c
+nginx-1.28.0/src/core/ngx_regex.c
+nginx-1.28.0/src/core/ngx_thread_pool.h
+nginx-1.28.0/src/core/ngx_rwlock.c
+nginx-1.28.0/src/core/ngx_open_file_cache.h
+nginx-1.28.0/src/core/ngx_string.h
+nginx-1.28.0/src/core/ngx_syslog.c
+nginx-1.28.0/src/core/ngx_murmurhash.h
+nginx-1.28.0/src/core/ngx_parse.c
+nginx-1.28.0/src/core/ngx_radix_tree.h
+nginx-1.28.0/src/core/ngx_times.c
+nginx-1.28.0/src/core/ngx_file.h
+nginx-1.28.0/src/core/ngx_resolver.h
+nginx-1.28.0/src/core/ngx_md5.c
+nginx-1.28.0/src/core/ngx_connection.c
+nginx-1.28.0/src/core/ngx_sha1.c
+nginx-1.28.0/src/core/ngx_inet.h
+nginx-1.28.0/src/core/ngx_array.h
+nginx-1.28.0/src/core/ngx_rbtree.h
+nginx-1.28.0/src/core/ngx_proxy_protocol.c
+nginx-1.28.0/src/core/ngx_parse_time.c
+nginx-1.28.0/src/core/nginx.c
+nginx-1.28.0/src/core/ngx_bpf.c
+nginx-1.28.0/src/core/ngx_conf_file.h
+nginx-1.28.0/src/core/ngx_hash.c
+nginx-1.28.0/src/core/ngx_crypt.h
+nginx-1.28.0/src/core/ngx_spinlock.c
+nginx-1.28.0/src/core/ngx_buf.c
+nginx-1.28.0/src/misc/ngx_google_perftools_module.c
+nginx-1.28.0/src/misc/ngx_cpp_test_module.cpp
+nginx-1.28.0/conf/fastcgi_params
+nginx-1.28.0/conf/scgi_params
+nginx-1.28.0/conf/fastcgi.conf
+nginx-1.28.0/conf/uwsgi_params
+nginx-1.28.0/conf/koi-win
+nginx-1.28.0/conf/mime.types
+nginx-1.28.0/conf/koi-utf
+nginx-1.28.0/conf/win-utf
+nginx-1.28.0/conf/nginx.conf
+nginx-1.28.0/contrib/vim/
+nginx-1.28.0/contrib/README
+nginx-1.28.0/contrib/geo2nginx.pl
+nginx-1.28.0/contrib/unicode2nginx/
+nginx-1.28.0/contrib/unicode2nginx/unicode-to-nginx.pl
+nginx-1.28.0/contrib/unicode2nginx/koi-utf
+nginx-1.28.0/contrib/unicode2nginx/win-utf
+nginx-1.28.0/contrib/vim/indent/
+nginx-1.28.0/contrib/vim/ftplugin/
+nginx-1.28.0/contrib/vim/ftdetect/
+nginx-1.28.0/contrib/vim/syntax/
+nginx-1.28.0/contrib/vim/syntax/nginx.vim
+nginx-1.28.0/contrib/vim/ftdetect/nginx.vim
+nginx-1.28.0/contrib/vim/ftplugin/nginx.vim
+nginx-1.28.0/contrib/vim/indent/nginx.vim
+nginx-1.28.0/html/index.html
+nginx-1.28.0/html/50x.html
+nginx-1.28.0/auto/stubs
+nginx-1.28.0/auto/init
+nginx-1.28.0/auto/options
+nginx-1.28.0/auto/types/
+nginx-1.28.0/auto/install
+nginx-1.28.0/auto/define
+nginx-1.28.0/auto/have
+nginx-1.28.0/auto/endianness
+nginx-1.28.0/auto/nohave
+nginx-1.28.0/auto/module
+nginx-1.28.0/auto/include
+nginx-1.28.0/auto/have_headers
+nginx-1.28.0/auto/unix
+nginx-1.28.0/auto/threads
+nginx-1.28.0/auto/headers
+nginx-1.28.0/auto/make
+nginx-1.28.0/auto/lib/
+nginx-1.28.0/auto/sources
+nginx-1.28.0/auto/os/
+nginx-1.28.0/auto/feature
+nginx-1.28.0/auto/cc/
+nginx-1.28.0/auto/modules
+nginx-1.28.0/auto/summary
+nginx-1.28.0/auto/cc/clang
+nginx-1.28.0/auto/cc/msvc
+nginx-1.28.0/auto/cc/sunc
+nginx-1.28.0/auto/cc/owc
+nginx-1.28.0/auto/cc/gcc
+nginx-1.28.0/auto/cc/ccc
+nginx-1.28.0/auto/cc/icc
+nginx-1.28.0/auto/cc/bcc
+nginx-1.28.0/auto/cc/name
+nginx-1.28.0/auto/cc/conf
+nginx-1.28.0/auto/cc/acc
+nginx-1.28.0/auto/os/freebsd
+nginx-1.28.0/auto/os/linux
+nginx-1.28.0/auto/os/darwin
+nginx-1.28.0/auto/os/solaris
+nginx-1.28.0/auto/os/win32
+nginx-1.28.0/auto/os/conf
+nginx-1.28.0/auto/lib/libgd/
+nginx-1.28.0/auto/lib/libatomic/
+nginx-1.28.0/auto/lib/libxslt/
+nginx-1.28.0/auto/lib/perl/
+nginx-1.28.0/auto/lib/geoip/
+nginx-1.28.0/auto/lib/zlib/
+nginx-1.28.0/auto/lib/google-perftools/
+nginx-1.28.0/auto/lib/make
+nginx-1.28.0/auto/lib/pcre/
+nginx-1.28.0/auto/lib/openssl/
+nginx-1.28.0/auto/lib/conf
+nginx-1.28.0/auto/lib/openssl/makefile.bcc
+nginx-1.28.0/auto/lib/openssl/makefile.msvc
+nginx-1.28.0/auto/lib/openssl/make
+nginx-1.28.0/auto/lib/openssl/conf
+nginx-1.28.0/auto/lib/pcre/makefile.bcc
+nginx-1.28.0/auto/lib/pcre/makefile.msvc
+nginx-1.28.0/auto/lib/pcre/make
+nginx-1.28.0/auto/lib/pcre/conf
+nginx-1.28.0/auto/lib/pcre/makefile.owc
+nginx-1.28.0/auto/lib/google-perftools/conf
+nginx-1.28.0/auto/lib/zlib/makefile.bcc
+nginx-1.28.0/auto/lib/zlib/makefile.msvc
+nginx-1.28.0/auto/lib/zlib/make
+nginx-1.28.0/auto/lib/zlib/conf
+nginx-1.28.0/auto/lib/zlib/makefile.owc
+nginx-1.28.0/auto/lib/geoip/conf
+nginx-1.28.0/auto/lib/perl/make
+nginx-1.28.0/auto/lib/perl/conf
+nginx-1.28.0/auto/lib/libxslt/conf
+nginx-1.28.0/auto/lib/libatomic/make
+nginx-1.28.0/auto/lib/libatomic/conf
+nginx-1.28.0/auto/lib/libgd/conf
+nginx-1.28.0/auto/types/typedef
+nginx-1.28.0/auto/types/value
+nginx-1.28.0/auto/types/uintptr_t
+nginx-1.28.0/auto/types/sizeof
+nginx-1.28.0/man/nginx.8
+[root@localhost nginx-server]# cd nginx-1.28.0
+[root@localhost nginx-1.28.0]# ./configure
+checking for OS
+ + Linux 4.19.90-52.22.v2207.ky10.aarch64 aarch64
+checking for C compiler ... found
+ + using GNU C compiler
+ + gcc version: 7.3.0 (GCC)
+checking for gcc -pipe switch ... found
+checking for -Wl,-E switch ... found
+checking for gcc builtin atomic operations ... found
+checking for C99 variadic macros ... found
+checking for gcc variadic macros ... found
+checking for gcc builtin 64 bit byteswap ... found
+checking for unistd.h ... found
+checking for inttypes.h ... found
+checking for limits.h ... found
+checking for sys/filio.h ... not found
+checking for sys/param.h ... found
+checking for sys/mount.h ... found
+checking for sys/statvfs.h ... found
+checking for crypt.h ... found
+checking for Linux specific features
+checking for epoll ... found
+checking for EPOLLRDHUP ... found
+checking for EPOLLEXCLUSIVE ... found
+checking for eventfd() ... found
+checking for O_PATH ... found
+checking for sendfile() ... found
+checking for sendfile64() ... found
+checking for sys/prctl.h ... found
+checking for prctl(PR_SET_DUMPABLE) ... found
+checking for prctl(PR_SET_KEEPCAPS) ... found
+checking for capabilities ... found
+checking for crypt_r() ... found
+checking for sys/vfs.h ... found
+checking for BPF sockhash ... found
+checking for SO_COOKIE ... found
+checking for UDP_SEGMENT ... not found
+checking for nobody group ... found
+checking for poll() ... found
+checking for /dev/poll ... not found
+checking for kqueue ... not found
+checking for crypt() ... not found
+checking for crypt() in libcrypt ... found
+checking for F_READAHEAD ... not found
+checking for posix_fadvise() ... found
+checking for O_DIRECT ... found
+checking for F_NOCACHE ... not found
+checking for directio() ... not found
+checking for statfs() ... found
+checking for statvfs() ... found
+checking for dlopen() ... not found
+checking for dlopen() in libdl ... found
+checking for sched_yield() ... found
+checking for sched_setaffinity() ... found
+checking for SO_SETFIB ... not found
+checking for SO_REUSEPORT ... found
+checking for SO_ACCEPTFILTER ... not found
+checking for SO_BINDANY ... not found
+checking for IP_TRANSPARENT ... found
+checking for IP_BINDANY ... not found
+checking for IP_BIND_ADDRESS_NO_PORT ... found
+checking for IP_RECVDSTADDR ... not found
+checking for IP_SENDSRCADDR ... not found
+checking for IP_PKTINFO ... found
+checking for IPV6_RECVPKTINFO ... found
+checking for IP_MTU_DISCOVER ... found
+checking for IPV6_MTU_DISCOVER ... found
+checking for IP_DONTFRAG ... not found
+checking for IPV6_DONTFRAG ... found
+checking for TCP_DEFER_ACCEPT ... found
+checking for TCP_KEEPIDLE ... found
+checking for TCP_FASTOPEN ... found
+checking for TCP_INFO ... found
+checking for accept4() ... found
+checking for int size ... 4 bytes
+checking for long size ... 8 bytes
+checking for long long size ... 8 bytes
+checking for void * size ... 8 bytes
+checking for uint32_t ... found
+checking for uint64_t ... found
+checking for sig_atomic_t ... found
+checking for sig_atomic_t size ... 4 bytes
+checking for socklen_t ... found
+checking for in_addr_t ... found
+checking for in_port_t ... found
+checking for rlim_t ... found
+checking for uintptr_t ... uintptr_t found
+checking for system byte ordering ... little endian
+checking for size_t size ... 8 bytes
+checking for off_t size ... 8 bytes
+checking for time_t size ... 8 bytes
+checking for AF_INET6 ... found
+checking for setproctitle() ... not found
+checking for pread() ... found
+checking for pwrite() ... found
+checking for pwritev() ... found
+checking for strerrordesc_np() ... not found
+checking for sys_nerr ... found
+checking for localtime_r() ... found
+checking for clock_gettime(CLOCK_MONOTONIC) ... found
+checking for posix_memalign() ... found
+checking for memalign() ... found
+checking for mmap(MAP_ANON|MAP_SHARED) ... found
+checking for mmap("/dev/zero", MAP_SHARED) ... found
+checking for System V shared memory ... found
+checking for POSIX semaphores ... not found
+checking for POSIX semaphores in libpthread ... found
+checking for struct msghdr.msg_control ... found
+checking for ioctl(FIONBIO) ... found
+checking for ioctl(FIONREAD) ... found
+checking for struct tm.tm_gmtoff ... found
+checking for struct dirent.d_namlen ... not found
+checking for struct dirent.d_type ... found
+checking for sysconf(_SC_NPROCESSORS_ONLN) ... found
+checking for sysconf(_SC_LEVEL1_DCACHE_LINESIZE) ... found
+checking for openat(), fstatat() ... found
+checking for getaddrinfo() ... found
+checking for PCRE2 library ... found
+checking for zlib library ... found
+creating objs/Makefile
+
+Configuration summary
+ + using system PCRE2 library
+ + OpenSSL library is not used
+ + using system zlib library
+
+ nginx path prefix: "/usr/local/nginx"
+ nginx binary file: "/usr/local/nginx/sbin/nginx"
+ nginx modules path: "/usr/local/nginx/modules"
+ nginx configuration prefix: "/usr/local/nginx/conf"
+ nginx configuration file: "/usr/local/nginx/conf/nginx.conf"
+ nginx pid file: "/usr/local/nginx/logs/nginx.pid"
+ nginx error log file: "/usr/local/nginx/logs/error.log"
+ nginx http access log file: "/usr/local/nginx/logs/access.log"
+ nginx http client request body temporary files: "client_body_temp"
+ nginx http proxy temporary files: "proxy_temp"
+ nginx http fastcgi temporary files: "fastcgi_temp"
+ nginx http uwsgi temporary files: "uwsgi_temp"
+ nginx http scgi temporary files: "scgi_temp"
+
+[root@localhost nginx-1.28.0]# make -j4 && make install
+make -f objs/Makefile
+make[1]: 进入目录“/media/nginx-server/nginx-1.28.0”
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/nginx.o \
+ src/core/nginx.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_log.o \
+ src/core/ngx_log.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_palloc.o \
+ src/core/ngx_palloc.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_array.o \
+ src/core/ngx_array.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_list.o \
+ src/core/ngx_list.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_hash.o \
+ src/core/ngx_hash.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_buf.o \
+ src/core/ngx_buf.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_queue.o \
+ src/core/ngx_queue.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_output_chain.o \
+ src/core/ngx_output_chain.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_string.o \
+ src/core/ngx_string.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_parse.o \
+ src/core/ngx_parse.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_parse_time.o \
+ src/core/ngx_parse_time.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_inet.o \
+ src/core/ngx_inet.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_file.o \
+ src/core/ngx_file.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_crc32.o \
+ src/core/ngx_crc32.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_murmurhash.o \
+ src/core/ngx_murmurhash.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_md5.o \
+ src/core/ngx_md5.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_sha1.o \
+ src/core/ngx_sha1.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_rbtree.o \
+ src/core/ngx_rbtree.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_radix_tree.o \
+ src/core/ngx_radix_tree.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_slab.o \
+ src/core/ngx_slab.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_times.o \
+ src/core/ngx_times.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_shmtx.o \
+ src/core/ngx_shmtx.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_connection.o \
+ src/core/ngx_connection.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_cycle.o \
+ src/core/ngx_cycle.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_spinlock.o \
+ src/core/ngx_spinlock.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_rwlock.o \
+ src/core/ngx_rwlock.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_cpuinfo.o \
+ src/core/ngx_cpuinfo.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_conf_file.o \
+ src/core/ngx_conf_file.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_module.o \
+ src/core/ngx_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_resolver.o \
+ src/core/ngx_resolver.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_open_file_cache.o \
+ src/core/ngx_open_file_cache.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_crypt.o \
+ src/core/ngx_crypt.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_proxy_protocol.o \
+ src/core/ngx_proxy_protocol.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_syslog.o \
+ src/core/ngx_syslog.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event.o \
+ src/event/ngx_event.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event_timer.o \
+ src/event/ngx_event_timer.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event_posted.o \
+ src/event/ngx_event_posted.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event_accept.o \
+ src/event/ngx_event_accept.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event_udp.o \
+ src/event/ngx_event_udp.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event_connect.o \
+ src/event/ngx_event_connect.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/ngx_event_pipe.o \
+ src/event/ngx_event_pipe.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_time.o \
+ src/os/unix/ngx_time.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_errno.o \
+ src/os/unix/ngx_errno.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_alloc.o \
+ src/os/unix/ngx_alloc.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_files.o \
+ src/os/unix/ngx_files.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_socket.o \
+ src/os/unix/ngx_socket.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_recv.o \
+ src/os/unix/ngx_recv.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_readv_chain.o \
+ src/os/unix/ngx_readv_chain.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_udp_recv.o \
+ src/os/unix/ngx_udp_recv.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_send.o \
+ src/os/unix/ngx_send.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_writev_chain.o \
+ src/os/unix/ngx_writev_chain.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_udp_send.o \
+ src/os/unix/ngx_udp_send.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_udp_sendmsg_chain.o \
+ src/os/unix/ngx_udp_sendmsg_chain.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_channel.o \
+ src/os/unix/ngx_channel.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_shmem.o \
+ src/os/unix/ngx_shmem.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_process.o \
+ src/os/unix/ngx_process.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_daemon.o \
+ src/os/unix/ngx_daemon.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_setaffinity.o \
+ src/os/unix/ngx_setaffinity.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_setproctitle.o \
+ src/os/unix/ngx_setproctitle.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_posix_init.o \
+ src/os/unix/ngx_posix_init.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_user.o \
+ src/os/unix/ngx_user.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_dlopen.o \
+ src/os/unix/ngx_dlopen.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_process_cycle.o \
+ src/os/unix/ngx_process_cycle.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_linux_init.o \
+ src/os/unix/ngx_linux_init.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/event/modules/ngx_epoll_module.o \
+ src/event/modules/ngx_epoll_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/os/unix/ngx_linux_sendfile_chain.o \
+ src/os/unix/ngx_linux_sendfile_chain.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_bpf.o \
+ src/core/ngx_bpf.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/src/core/ngx_regex.o \
+ src/core/ngx_regex.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http.o \
+ src/http/ngx_http.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_core_module.o \
+ src/http/ngx_http_core_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_special_response.o \
+ src/http/ngx_http_special_response.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_request.o \
+ src/http/ngx_http_request.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_parse.o \
+ src/http/ngx_http_parse.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_log_module.o \
+ src/http/modules/ngx_http_log_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_request_body.o \
+ src/http/ngx_http_request_body.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_variables.o \
+ src/http/ngx_http_variables.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_script.o \
+ src/http/ngx_http_script.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_upstream.o \
+ src/http/ngx_http_upstream.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_upstream_round_robin.o \
+ src/http/ngx_http_upstream_round_robin.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_file_cache.o \
+ src/http/ngx_http_file_cache.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_write_filter_module.o \
+ src/http/ngx_http_write_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_header_filter_module.o \
+ src/http/ngx_http_header_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_chunked_filter_module.o \
+ src/http/modules/ngx_http_chunked_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_range_filter_module.o \
+ src/http/modules/ngx_http_range_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_gzip_filter_module.o \
+ src/http/modules/ngx_http_gzip_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_postpone_filter_module.o \
+ src/http/ngx_http_postpone_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_ssi_filter_module.o \
+ src/http/modules/ngx_http_ssi_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_charset_filter_module.o \
+ src/http/modules/ngx_http_charset_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_userid_filter_module.o \
+ src/http/modules/ngx_http_userid_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_headers_filter_module.o \
+ src/http/modules/ngx_http_headers_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/ngx_http_copy_filter_module.o \
+ src/http/ngx_http_copy_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_not_modified_filter_module.o \
+ src/http/modules/ngx_http_not_modified_filter_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_static_module.o \
+ src/http/modules/ngx_http_static_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_autoindex_module.o \
+ src/http/modules/ngx_http_autoindex_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_index_module.o \
+ src/http/modules/ngx_http_index_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_mirror_module.o \
+ src/http/modules/ngx_http_mirror_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_try_files_module.o \
+ src/http/modules/ngx_http_try_files_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_auth_basic_module.o \
+ src/http/modules/ngx_http_auth_basic_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_access_module.o \
+ src/http/modules/ngx_http_access_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_limit_conn_module.o \
+ src/http/modules/ngx_http_limit_conn_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_limit_req_module.o \
+ src/http/modules/ngx_http_limit_req_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_geo_module.o \
+ src/http/modules/ngx_http_geo_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_map_module.o \
+ src/http/modules/ngx_http_map_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_split_clients_module.o \
+ src/http/modules/ngx_http_split_clients_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_referer_module.o \
+ src/http/modules/ngx_http_referer_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_rewrite_module.o \
+ src/http/modules/ngx_http_rewrite_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_proxy_module.o \
+ src/http/modules/ngx_http_proxy_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_fastcgi_module.o \
+ src/http/modules/ngx_http_fastcgi_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_uwsgi_module.o \
+ src/http/modules/ngx_http_uwsgi_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_scgi_module.o \
+ src/http/modules/ngx_http_scgi_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_memcached_module.o \
+ src/http/modules/ngx_http_memcached_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_empty_gif_module.o \
+ src/http/modules/ngx_http_empty_gif_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_browser_module.o \
+ src/http/modules/ngx_http_browser_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_upstream_hash_module.o \
+ src/http/modules/ngx_http_upstream_hash_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_upstream_ip_hash_module.o \
+ src/http/modules/ngx_http_upstream_ip_hash_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_upstream_least_conn_module.o \
+ src/http/modules/ngx_http_upstream_least_conn_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_upstream_random_module.o \
+ src/http/modules/ngx_http_upstream_random_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_upstream_keepalive_module.o \
+ src/http/modules/ngx_http_upstream_keepalive_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs -I src/http -I src/http/modules \
+ -o objs/src/http/modules/ngx_http_upstream_zone_module.o \
+ src/http/modules/ngx_http_upstream_zone_module.c
+cc -c -pipe -O -W -Wall -Wpointer-arith -Wno-unused-parameter -Werror -g -I src/core -I src/event -I src/event/modules -I src/event/quic -I src/os/unix -I objs \
+ -o objs/ngx_modules.o \
+ objs/ngx_modules.c
+sed -e "s|%%PREFIX%%|/usr/local/nginx|" \
+ -e "s|%%PID_PATH%%|/usr/local/nginx/logs/nginx.pid|" \
+ -e "s|%%CONF_PATH%%|/usr/local/nginx/conf/nginx.conf|" \
+ -e "s|%%ERROR_LOG_PATH%%|/usr/local/nginx/logs/error.log|" \
+ < man/nginx.8 > objs/nginx.8
+cc -o objs/nginx \
+objs/src/core/nginx.o \
+objs/src/core/ngx_log.o \
+objs/src/core/ngx_palloc.o \
+objs/src/core/ngx_array.o \
+objs/src/core/ngx_list.o \
+objs/src/core/ngx_hash.o \
+objs/src/core/ngx_buf.o \
+objs/src/core/ngx_queue.o \
+objs/src/core/ngx_output_chain.o \
+objs/src/core/ngx_string.o \
+objs/src/core/ngx_parse.o \
+objs/src/core/ngx_parse_time.o \
+objs/src/core/ngx_inet.o \
+objs/src/core/ngx_file.o \
+objs/src/core/ngx_crc32.o \
+objs/src/core/ngx_murmurhash.o \
+objs/src/core/ngx_md5.o \
+objs/src/core/ngx_sha1.o \
+objs/src/core/ngx_rbtree.o \
+objs/src/core/ngx_radix_tree.o \
+objs/src/core/ngx_slab.o \
+objs/src/core/ngx_times.o \
+objs/src/core/ngx_shmtx.o \
+objs/src/core/ngx_connection.o \
+objs/src/core/ngx_cycle.o \
+objs/src/core/ngx_spinlock.o \
+objs/src/core/ngx_rwlock.o \
+objs/src/core/ngx_cpuinfo.o \
+objs/src/core/ngx_conf_file.o \
+objs/src/core/ngx_module.o \
+objs/src/core/ngx_resolver.o \
+objs/src/core/ngx_open_file_cache.o \
+objs/src/core/ngx_crypt.o \
+objs/src/core/ngx_proxy_protocol.o \
+objs/src/core/ngx_syslog.o \
+objs/src/event/ngx_event.o \
+objs/src/event/ngx_event_timer.o \
+objs/src/event/ngx_event_posted.o \
+objs/src/event/ngx_event_accept.o \
+objs/src/event/ngx_event_udp.o \
+objs/src/event/ngx_event_connect.o \
+objs/src/event/ngx_event_pipe.o \
+objs/src/os/unix/ngx_time.o \
+objs/src/os/unix/ngx_errno.o \
+objs/src/os/unix/ngx_alloc.o \
+objs/src/os/unix/ngx_files.o \
+objs/src/os/unix/ngx_socket.o \
+objs/src/os/unix/ngx_recv.o \
+objs/src/os/unix/ngx_readv_chain.o \
+objs/src/os/unix/ngx_udp_recv.o \
+objs/src/os/unix/ngx_send.o \
+objs/src/os/unix/ngx_writev_chain.o \
+objs/src/os/unix/ngx_udp_send.o \
+objs/src/os/unix/ngx_udp_sendmsg_chain.o \
+objs/src/os/unix/ngx_channel.o \
+objs/src/os/unix/ngx_shmem.o \
+objs/src/os/unix/ngx_process.o \
+objs/src/os/unix/ngx_daemon.o \
+objs/src/os/unix/ngx_setaffinity.o \
+objs/src/os/unix/ngx_setproctitle.o \
+objs/src/os/unix/ngx_posix_init.o \
+objs/src/os/unix/ngx_user.o \
+objs/src/os/unix/ngx_dlopen.o \
+objs/src/os/unix/ngx_process_cycle.o \
+objs/src/os/unix/ngx_linux_init.o \
+objs/src/event/modules/ngx_epoll_module.o \
+objs/src/os/unix/ngx_linux_sendfile_chain.o \
+objs/src/core/ngx_bpf.o \
+objs/src/core/ngx_regex.o \
+objs/src/http/ngx_http.o \
+objs/src/http/ngx_http_core_module.o \
+objs/src/http/ngx_http_special_response.o \
+objs/src/http/ngx_http_request.o \
+objs/src/http/ngx_http_parse.o \
+objs/src/http/modules/ngx_http_log_module.o \
+objs/src/http/ngx_http_request_body.o \
+objs/src/http/ngx_http_variables.o \
+objs/src/http/ngx_http_script.o \
+objs/src/http/ngx_http_upstream.o \
+objs/src/http/ngx_http_upstream_round_robin.o \
+objs/src/http/ngx_http_file_cache.o \
+objs/src/http/ngx_http_write_filter_module.o \
+objs/src/http/ngx_http_header_filter_module.o \
+objs/src/http/modules/ngx_http_chunked_filter_module.o \
+objs/src/http/modules/ngx_http_range_filter_module.o \
+objs/src/http/modules/ngx_http_gzip_filter_module.o \
+objs/src/http/ngx_http_postpone_filter_module.o \
+objs/src/http/modules/ngx_http_ssi_filter_module.o \
+objs/src/http/modules/ngx_http_charset_filter_module.o \
+objs/src/http/modules/ngx_http_userid_filter_module.o \
+objs/src/http/modules/ngx_http_headers_filter_module.o \
+objs/src/http/ngx_http_copy_filter_module.o \
+objs/src/http/modules/ngx_http_not_modified_filter_module.o \
+objs/src/http/modules/ngx_http_static_module.o \
+objs/src/http/modules/ngx_http_autoindex_module.o \
+objs/src/http/modules/ngx_http_index_module.o \
+objs/src/http/modules/ngx_http_mirror_module.o \
+objs/src/http/modules/ngx_http_try_files_module.o \
+objs/src/http/modules/ngx_http_auth_basic_module.o \
+objs/src/http/modules/ngx_http_access_module.o \
+objs/src/http/modules/ngx_http_limit_conn_module.o \
+objs/src/http/modules/ngx_http_limit_req_module.o \
+objs/src/http/modules/ngx_http_geo_module.o \
+objs/src/http/modules/ngx_http_map_module.o \
+objs/src/http/modules/ngx_http_split_clients_module.o \
+objs/src/http/modules/ngx_http_referer_module.o \
+objs/src/http/modules/ngx_http_rewrite_module.o \
+objs/src/http/modules/ngx_http_proxy_module.o \
+objs/src/http/modules/ngx_http_fastcgi_module.o \
+objs/src/http/modules/ngx_http_uwsgi_module.o \
+objs/src/http/modules/ngx_http_scgi_module.o \
+objs/src/http/modules/ngx_http_memcached_module.o \
+objs/src/http/modules/ngx_http_empty_gif_module.o \
+objs/src/http/modules/ngx_http_browser_module.o \
+objs/src/http/modules/ngx_http_upstream_hash_module.o \
+objs/src/http/modules/ngx_http_upstream_ip_hash_module.o \
+objs/src/http/modules/ngx_http_upstream_least_conn_module.o \
+objs/src/http/modules/ngx_http_upstream_random_module.o \
+objs/src/http/modules/ngx_http_upstream_keepalive_module.o \
+objs/src/http/modules/ngx_http_upstream_zone_module.o \
+objs/ngx_modules.o \
+-ldl -lpthread -lcrypt -lpcre2-8 -lz \
+-Wl,-E
+make[1]: 离开目录“/media/nginx-server/nginx-1.28.0”
+make -f objs/Makefile install
+make[1]: 进入目录“/media/nginx-server/nginx-1.28.0”
+test -d '/usr/local/nginx' || mkdir -p '/usr/local/nginx'
+test -d '/usr/local/nginx/sbin' \
+ || mkdir -p '/usr/local/nginx/sbin'
+test ! -f '/usr/local/nginx/sbin/nginx' \
+ || mv '/usr/local/nginx/sbin/nginx' \
+ '/usr/local/nginx/sbin/nginx.old'
+cp objs/nginx '/usr/local/nginx/sbin/nginx'
+test -d '/usr/local/nginx/conf' \
+ || mkdir -p '/usr/local/nginx/conf'
+cp conf/koi-win '/usr/local/nginx/conf'
+cp conf/koi-utf '/usr/local/nginx/conf'
+cp conf/win-utf '/usr/local/nginx/conf'
+test -f '/usr/local/nginx/conf/mime.types' \
+ || cp conf/mime.types '/usr/local/nginx/conf'
+cp conf/mime.types '/usr/local/nginx/conf/mime.types.default'
+test -f '/usr/local/nginx/conf/fastcgi_params' \
+ || cp conf/fastcgi_params '/usr/local/nginx/conf'
+cp conf/fastcgi_params \
+ '/usr/local/nginx/conf/fastcgi_params.default'
+test -f '/usr/local/nginx/conf/fastcgi.conf' \
+ || cp conf/fastcgi.conf '/usr/local/nginx/conf'
+cp conf/fastcgi.conf '/usr/local/nginx/conf/fastcgi.conf.default'
+test -f '/usr/local/nginx/conf/uwsgi_params' \
+ || cp conf/uwsgi_params '/usr/local/nginx/conf'
+cp conf/uwsgi_params \
+ '/usr/local/nginx/conf/uwsgi_params.default'
+test -f '/usr/local/nginx/conf/scgi_params' \
+ || cp conf/scgi_params '/usr/local/nginx/conf'
+cp conf/scgi_params \
+ '/usr/local/nginx/conf/scgi_params.default'
+test -f '/usr/local/nginx/conf/nginx.conf' \
+ || cp conf/nginx.conf '/usr/local/nginx/conf/nginx.conf'
+cp conf/nginx.conf '/usr/local/nginx/conf/nginx.conf.default'
+test -d '/usr/local/nginx/logs' \
+ || mkdir -p '/usr/local/nginx/logs'
+test -d '/usr/local/nginx/logs' \
+ || mkdir -p '/usr/local/nginx/logs'
+test -d '/usr/local/nginx/html' \
+ || cp -R html '/usr/local/nginx'
+test -d '/usr/local/nginx/logs' \
+ || mkdir -p '/usr/local/nginx/logs'
+make[1]: 离开目录“/media/nginx-server/nginx-1.28.0”
+[root@localhost nginx-1.28.0]# useradd nginx
+[root@localhost nginx-1.28.0]# chown nginx:nginx /media/nginx-server
+[root@localhost nginx-1.28.0]# chown nginx:nginx /usr/local/nginx
+[root@localhost nginx-1.28.0]#
+[root@localhost nginx-1.28.0]# cd /usr/local/nginx/sbin/
+[root@localhost sbin]#
+[root@localhost sbin]# ./nginx -v
+nginx version: nginx/1.28.0
+[root@localhost sbin]#
+[root@localhost sbin]# cd /usr/local/nginx/sbin/
+[root@localhost sbin]#
+[root@localhost sbin]# ./nginx
+[root@localhost sbin]# ps -ef | grep nginx
+root 874588 1 0 13:47 ? 00:00:00 nginx: master process ./nginx
+nobody 874589 874588 0 13:47 ? 00:00:00 nginx: worker process
+root 874592 869327 0 13:47 pts/7 00:00:00 grep nginx
+[root@localhost sbin]# cd /lib/systemd/system/
+[root@localhost system]# vim nginx.service
+[root@localhost system]# systemctl enable nginx
+Created symlink /etc/systemd/system/multi-user.target.wants/nginx.service → /usr/lib/systemd/system/nginx.service.
+[root@localhost system]# systemctl disable nginx
+Removed /etc/systemd/system/multi-user.target.wants/nginx.service.
+[root@localhost system]# systemctl enable nginx
+Created symlink /etc/systemd/system/multi-user.target.wants/nginx.service → /usr/lib/systemd/system/nginx.service.
+[root@localhost system]#
diff --git a/麒麟系统环境部署命令/3.txt b/麒麟系统环境部署命令/3.txt
new file mode 100644
index 0000000..8f80b95
--- /dev/null
+++ b/麒麟系统环境部署命令/3.txt
@@ -0,0 +1,3588 @@
+[root@localhost /]# cat /etc/os-release
+NAME="Kylin Linux Advanced Server"
+VERSION="V10 (Lance)"
+ID="kylin"
+VERSION_ID="V10"
+PRETTY_NAME="Kylin Linux Advanced Server V10 (Lance)"
+ANSI_COLOR="0;31"
+
+[root@localhost /]# uname -a
+Linux localhost.localdomain 4.19.90-52.22.v2207.ky10.aarch64 #1 SMP Tue Mar 14 11:52:45 CST 2023 aarch64 aarch64 aarch64 GNU/Linux
+[root@localhost /]# free -h
+ total used free shared buff/cache available
+Mem: 29Gi 2.6Gi 25Gi 42Mi 1.0Gi 23Gi
+Swap: 15Gi 0B 15Gi
+[root@localhost /]# df -h
+文件系统 容量 已用 可用 已用% 挂载点
+devtmpfs 15G 0 15G 0% /dev
+tmpfs 15G 64K 15G 1% /dev/shm
+tmpfs 15G 31M 15G 1% /run
+tmpfs 15G 0 15G 0% /sys/fs/cgroup
+/dev/mapper/klas-root 380G 12G 368G 4% /
+tmpfs 15G 64K 15G 1% /tmp
+/dev/sda2 1014M 165M 850M 17% /boot
+/dev/sda1 599M 6.5M 593M 2% /boot/efi
+/dev/sdb1 7.3T 52G 7.3T 1% /media/raid1
+tmpfs 3.0G 768K 3.0G 1% /run/user/1000
+tmpfs 3.0G 0 3.0G 0% /run/user/0
+[root@localhost /]# lscpu
+架构: aarch64
+CPU 运行模式: 64-bit
+字节序: Little Endian
+CPU: 64
+在线 CPU 列表: 0-63
+每个核的线程数: 1
+每个座的核数: 64
+座: 1
+NUMA 节点: 8
+厂商 ID: Phytium
+型号: 2
+型号名称: FT-2000+/64
+步进: 0x1
+BogoMIPS: 100.00
+L1d 缓存: 2 MiB
+L1i 缓存: 2 MiB
+L2 缓存: 32 MiB
+NUMA 节点0 CPU: 0-7
+NUMA 节点1 CPU: 8-15
+NUMA 节点2 CPU: 16-23
+NUMA 节点3 CPU: 24-31
+NUMA 节点4 CPU: 32-39
+NUMA 节点5 CPU: 40-47
+NUMA 节点6 CPU: 48-55
+NUMA 节点7 CPU: 56-63
+Vulnerability Itlb multihit: Not affected
+Vulnerability L1tf: Not affected
+Vulnerability Mds: Not affected
+Vulnerability Meltdown: Not affected
+Vulnerability Mmio stale data: Not affected
+Vulnerability Spec store bypass: Not affected
+Vulnerability Spectre v1: Mitigation; __user pointer sanitization
+Vulnerability Spectre v2: Not affected
+Vulnerability Srbds: Not affected
+Vulnerability Tsx async abort: Not affected
+标记: fp asimd evtstrm crc32 cpuid
+[root@localhost /]# ^C
+
+Last login: Sun Apr 27 15:23:20 2025 from 10.42.0.91
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Up /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|L|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# java -version
+openjdk version "1.8.0_312"
+OpenJDK Runtime Environment Bisheng (build 1.8.0_312-b07)
+OpenJDK 64-Bit Server VM Bisheng (build 25.312-b07, mixed mode)
+[root@localhost ~]# cd /media/tao_iot
+[root@localhost tao_iot]# java -jar haiwei-admin.jar
+Application Version: 3.8.7
+Spring Boot Version: 2.5.15
+////////////////////////////////////////////////////////////////////
+// _ooOoo_ //
+// o8888888o //
+// 88" . "88 //
+// (| ^_^ |) //
+// O\ = /O //
+// ____/`---'\____ //
+// .' \\| |// `. //
+// / \\||| : |||// \ //
+// / _||||| -:- |||||- \ //
+// | | \\\ - /// | | //
+// | \_| ''\---/'' | | //
+// \ .-\__ `-` ___/-. / //
+// ___`. .' /--.--\ `. . ___ //
+// ."" '< `.___\_<|>_/___.' >'"". //
+// | | : `- \`.;`\ _ /`;.`/ - ` : | | //
+// \ \ `-. \_ __\ /__ _/ .-` / / //
+// ========`-.____`-.___\_____/___.-`____.-'======== //
+// `=---=' //
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ //
+// 佛祖保佑 永不宕机 永无BUG //
+////////////////////////////////////////////////////////////////////
+15:24:32.023 [main] INFO c.o.RuoYiApplication - [logStarting,55] - Starting RuoYiApplication using Java 1.8.0_312 on localhost.localdomain with PID 5266 (/media/tao_iot/haiwei-admin.jar started by root in /media/tao_iot)
+15:24:32.023 [background-preinit] INFO o.h.v.i.util.Version - [,21] - HV000001: Hibernate Validator 6.2.5.Final
+15:24:32.031 [main] DEBUG c.o.RuoYiApplication - [logStarting,56] - Running with Spring Boot v2.5.15, Spring v5.3.33
+15:24:32.032 [main] INFO c.o.RuoYiApplication - [logStartupProfileInfo,686] - The following 1 profile is active: "druid"
+15:24:41.695 [main] INFO o.a.c.h.Http11NioProtocol - [log,173] - Initializing ProtocolHandler ["http-nio-8020"]
+15:24:41.697 [main] INFO o.a.c.c.StandardService - [log,173] - Starting service [Tomcat]
+15:24:41.698 [main] INFO o.a.c.c.StandardEngine - [log,173] - Starting Servlet engine: [Apache Tomcat/9.0.75]
+15:24:41.937 [main] INFO o.a.c.c.C.[.[.[/] - [log,173] - Initializing Spring embedded WebApplicationContext
+15:24:43.907 [main] INFO org.redisson.Version - [logVersion,41] - Redisson 3.16.4
+15:24:44.453 [main] ERROR o.s.b.w.e.t.TomcatStarter - [onStartup,61] - Error starting Tomcat context. Exception: org.springframework.beans.factory.UnsatisfiedDependencyException. Message: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+15:24:44.814 [main] INFO o.a.c.c.StandardService - [log,173] - Stopping service [Tomcat]
+15:24:45.067 [main] WARN o.s.b.w.s.c.AnnotationConfigServletWebServerApplicationContext - [refresh,599] - Exception encountered during context initialization - cancelling refresh attempt: org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+15:24:45.154 [main] ERROR o.s.b.SpringApplication - [reportFailure,870] - Application run failed
+org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:163)
+ at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:585)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145)
+ at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:780)
+ at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:453)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:343)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1370)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1359)
+ at com.os.RuoYiApplication.main(RuoYiApplication.java:18)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:49)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:108)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:58)
+ at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:88)
+Caused by: org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:142)
+	at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.&lt;init&gt;(TomcatWebServer.java:104)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getTomcatWebServer(TomcatServletWebServerFactory.java:456)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getWebServer(TomcatServletWebServerFactory.java:204)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.createWebServer(ServletWebServerApplicationContext.java:182)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:160)
+ ... 16 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:214)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.getOrderedBeansOfType(ServletContextInitializerBeans.java:212)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:175)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:170)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAdaptableBeans(ServletContextInitializerBeans.java:155)
+	at org.springframework.boot.web.servlet.ServletContextInitializerBeans.&lt;init&gt;(ServletContextInitializerBeans.java:87)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.getServletContextInitializerBeans(ServletWebServerApplicationContext.java:260)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.selfInitialize(ServletWebServerApplicationContext.java:234)
+ at org.springframework.boot.web.embedded.tomcat.TomcatStarter.onStartup(TomcatStarter.java:53)
+ at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:4936)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardHost.startInternal(StandardHost.java:795)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardEngine.startInternal(StandardEngine.java:249)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardService.startInternal(StandardService.java:428)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardServer.startInternal(StandardServer.java:914)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.startup.Tomcat.start(Tomcat.java:486)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:123)
+ ... 21 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 63 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 77 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 91 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 104 common frames omitted
+Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:646)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:477)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 118 common frames omitted
+Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:641)
+ ... 132 common frames omitted
+Caused by: org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.redisson.connection.pool.ConnectionPool$1.lambda$run$0(ConnectionPool.java:158)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:557)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.connection.pool.ConnectionPool.promiseFailure(ConnectionPool.java:313)
+ at org.redisson.connection.pool.ConnectionPool.lambda$createConnection$3(ConnectionPool.java:279)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.client.RedisClient$2$2.run(RedisClient.java:251)
+ at io.netty.util.concurrent.AbstractEventExecutor.runTask(AbstractEventExecutor.java:174)
+ at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:167)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:470)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:569)
+ at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
+ at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
+ at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
+ at java.lang.Thread.run(Thread.java:748)
+Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: 拒绝连接: 10.42.0.1/10.42.0.1:6379
+Caused by: java.net.ConnectException: 拒绝连接
+ at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
+ at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:716)
+ at io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:337)
+ at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:334)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:776)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
+ at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
+ at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
+ at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
+ at java.lang.Thread.run(Thread.java:748)
+[root@localhost tao_iot]# systemctl status redis
+● redis.service - redis-server
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: failed (Result: exit-code) since Sun 2025-04-27 15:18:06 CST; 9min ago
+ Process: 1945 ExecStart=/usr/local/redis/redis-5.0.5/src/redis-server /usr/local/redis/redis-5.0.5/redis.conf (code=exited, status=203/EXEC)
+
+4月 27 15:18:06 localhost.localdomain systemd[1]: Starting redis-server...
+4月 27 15:18:06 localhost.localdomain systemd[1]: redis.service: Control process exited, code=exited, status=203/EXEC
+4月 27 15:18:06 localhost.localdomain systemd[1]: redis.service: Failed with result 'exit-code'.
+4月 27 15:18:06 localhost.localdomain systemd[1]: Failed to start redis-server.
+[root@localhost tao_iot]# systemctl start redis
+Job for redis.service failed because the control process exited with error code.
+See "systemctl status redis.service" and "journalctl -xe" for details.
+[root@localhost tao_iot]# systemctl enable redis
+Synchronizing state of redis.service with SysV service script with /usr/lib/systemd/systemd-sysv-install.
+Executing: /usr/lib/systemd/systemd-sysv-install enable redis
+
+[root@localhost tao_iot]# cd /media/redis/redis-5.0.5
+[root@localhost redis-5.0.5]# systemctl status redis
+● redis.service - redis-server
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: failed (Result: exit-code) since Sun 2025-04-27 15:28:04 CST; 3min 13s ago
+
+4月 27 15:28:04 localhost.localdomain systemd[1]: Starting redis-server...
+4月 27 15:28:04 localhost.localdomain systemd[1]: redis.service: Control process exited, code=exited, status=203/EXEC
+4月 27 15:28:04 localhost.localdomain systemd[1]: redis.service: Failed with result 'exit-code'.
+4月 27 15:28:04 localhost.localdomain systemd[1]: Failed to start redis-server.
+4月 27 15:28:12 localhost.localdomain systemd[1]: /etc/systemd/system/redis.service:7: Failed to parse boolean value, ignoring: true
+4月 27 15:28:13 localhost.localdomain systemd[1]: /etc/systemd/system/redis.service:7: Failed to parse boolean value, ignoring: true
+[root@localhost redis-5.0.5]# systemctl enable redis
+Synchronizing state of redis.service with SysV service script with /usr/lib/systemd/systemd-sysv-install.
+Executing: /usr/lib/systemd/systemd-sysv-install enable redis
+[root@localhost redis-5.0.5]# systemctl enable redis
+Synchronizing state of redis.service with SysV service script with /usr/lib/systemd/systemd-sysv-install.
+Executing: /usr/lib/systemd/systemd-sysv-install enable redis
+[root@localhost redis-5.0.5]# systemctl enable redis
+Synchronizing state of redis.service with SysV service script with /usr/lib/systemd/systemd-sysv-install.
+Executing: /usr/lib/systemd/systemd-sysv-install enable redis
+[root@localhost redis-5.0.5]# systemctl start redis
+Job for redis.service failed because the control process exited with error code.
+See "systemctl status redis.service" and "journalctl -xe" for details.
+[root@localhost redis-5.0.5]# ls
+00-RELEASENOTES BUGS CONTRIBUTING COPYING deps INSTALL Makefile MANIFESTO README.md redis.conf runtest runtest-cluster runtest-moduleapi runtest-sentinel sentinel.conf src tests utils
+[root@localhost redis-5.0.5]#
+[root@localhost redis-5.0.5]# cd /media/redis/redis-5.0.5/src
+[root@localhost src]# ./redis-server
+5777:C 27 Apr 2025 15:33:14.742 # oO0OoO0OoO0Oo Redis is starting oO0OoO0OoO0Oo
+5777:C 27 Apr 2025 15:33:14.742 # Redis version=5.0.5, bits=64, commit=00000000, modified=0, pid=5777, just started
+5777:C 27 Apr 2025 15:33:14.742 # Warning: no config file specified, using the default config. In order to specify a config file use ./redis-server /path/to/redis.conf
+5777:M 27 Apr 2025 15:33:14.743 * Increased maximum number of open files to 10032 (it was originally set to 1024).
+ _._
+ _.-``__ ''-._
+ _.-`` `. `_. ''-._ Redis 5.0.5 (00000000/0) 64 bit
+ .-`` .-```. ```\/ _.,_ ''-._
+ ( ' , .-` | `, ) Running in standalone mode
+ |`-._`-...-` __...-.``-._|'` _.-'| Port: 6379
+ | `-._ `._ / _.-' | PID: 5777
+ `-._ `-._ `-./ _.-' _.-'
+ |`-._`-._ `-.__.-' _.-'_.-'|
+ | `-._`-._ _.-'_.-' | http://redis.io
+ `-._ `-._`-.__.-'_.-' _.-'
+ |`-._`-._ `-.__.-' _.-'_.-'|
+ | `-._`-._ _.-'_.-' |
+ `-._ `-._`-.__.-'_.-' _.-'
+ `-._ `-.__.-' _.-'
+ `-._ _.-'
+ `-.__.-'
+
+5777:M 27 Apr 2025 15:33:14.744 # Server initialized
+5777:M 27 Apr 2025 15:33:14.744 # WARNING overcommit_memory is set to 0! Background save may fail under low memory condition. To fix this issue add 'vm.overcommit_memory = 1' to /etc/sysctl.conf and then reboot or run the command 'sysctl vm.overcommit_memory=1' for this to take effect.
+5777:M 27 Apr 2025 15:33:14.744 # WARNING you have Transparent Huge Pages (THP) support enabled in your kernel. This will create latency and memory usage issues with Redis. To fix this issue run the command 'echo never > /sys/kernel/mm/transparent_hugepage/enabled' as root, and add it to your /etc/rc.local in order to retain the setting after a reboot. Redis must be restarted after THP is disabled.
+5777:M 27 Apr 2025 15:33:14.744 * Ready to accept connections
+
+[root@localhost tao_iot]# firewall-cmd --zone=public --add-port=6379/tcp --permanent
+success
+[root@localhost tao_iot]# firewall-cmd --reload
+success
+[root@localhost tao_iot]#
+
+
+[root@localhost tao_iot]# cd /media/redis/redis-5.0.5
+[root@localhost redis-5.0.5]# vi redis.conf
+E325: ATTENTION
+Found a swap file by the name ".redis.conf.swp"
+ owned by: root dated: 日 4月 27 11:27:11 2025
+ file name: /media/redis/redis-5.0.5/redis.conf
+ modified: YES
+ user name: root host name: localhost.localdomain
+ process ID: 867217
+While opening file "redis.conf"
+ dated: 日 4月 27 15:48:57 2025
+ NEWER than swap file!
+
+(1) Another program may be editing the same file. If this is the case,
+ be careful not to end up with two different instances of the same
+ file when making changes. Quit, or continue with caution.
+(2) An edit session for this file crashed.
+ If this is the case, use ":recover" or "vim -r redis.conf"
+ to recover the changes (see ":help recovery").
+ If you did this already, delete the swap file ".redis.conf.swp"
+ to avoid this message.
+"redis.conf" 1373L, 61863C
+Press ENTER or type command to continue
+
+[root@localhost tao_iot]# firewall-cmd --zone=public --add-port=6379/tcp --permanent
+success
+[root@localhost tao_iot]# firewall-cmd --reload
+success
+[root@localhost tao_iot]# systemctl status redis
+● redis.service - redis-server
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: failed (Result: exit-code) since Sun 2025-04-27 15:31:36 CST; 21min ago
+ Process: 5755 ExecStart=/usr/local/redis/redis-5.0.5/src/redis-server /usr/local/redis/redis-5.0.5/redis.conf (code=exited, status=203/EXEC)
+
+4月 27 15:31:36 localhost.localdomain systemd[1]: Starting redis-server...
+4月 27 15:31:36 localhost.localdomain systemd[1]: redis.service: Control process exited, code=exited, status=203/EXEC
+4月 27 15:31:36 localhost.localdomain systemd[1]: redis.service: Failed with result 'exit-code'.
+4月 27 15:31:36 localhost.localdomain systemd[1]: Failed to start redis-server.
+[root@localhost tao_iot]# cd /media/redis/redis-5.0.5
+[root@localhost redis-5.0.5]# ./redis-server
+-bash: ./redis-server: 没有那个文件或目录
+[root@localhost redis-5.0.5]# cd /media/redis/redis-5.0.5/src
+[root@localhost src]# ./redis-server
+6591:C 27 Apr 2025 15:53:38.865 # oO0OoO0OoO0Oo Redis is starting oO0OoO0OoO0Oo
+6591:C 27 Apr 2025 15:53:38.865 # Redis version=5.0.5, bits=64, commit=00000000, modified=0, pid=6591, just started
+6591:C 27 Apr 2025 15:53:38.865 # Warning: no config file specified, using the default config. In order to specify a config file use ./redis-server /path/to/redis.conf
+6591:M 27 Apr 2025 15:53:38.866 * Increased maximum number of open files to 10032 (it was originally set to 1024).
+6591:M 27 Apr 2025 15:53:38.866 # Could not create server TCP listening socket *:6379: bind: Address already in use
+
+[root@localhost src]# ps aux | grep redis-server
+root 5777 0.0 0.0 1289792 11200 pts/1 Sl+ 15:33 0:01 ./redis-server *:6379
+root 6607 0.0 0.0 214144 1536 pts/0 S+ 15:54 0:00 grep redis-server
+[root@localhost src]# ps aux | grep redis-server
+root 5777 0.0 0.0 1289792 11200 pts/1 Sl+ 15:33 0:01 ./redis-server *:6379
+root 6607 0.0 0.0 214144 1536 pts/0 S+ 15:54 0:00 grep redis-server
+[root@localhost src]# vi /etc/systemd/system/redis.service
+[root@localhost src]# ls -la /media/redis/redis-5.0.5/redis.conf
+-rw-rw-r-- 1 root root 61863 4月 27 15:48 /media/redis/redis-5.0.5/redis.conf
+[root@localhost src]# kill 5777
+[root@localhost src]# systemctl daemon-reload
+[root@localhost src]# # 启动Redis服务
+[root@localhost src]# systemctl start redis
+[root@localhost src]# systemctl status redis
+● redis.service - Redis In-Memory Data Store
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: activating (auto-restart) (Result: exit-code) since Sun 2025-04-27 15:57:55 CST; 4s ago
+ Process: 6679 ExecStart=/media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf (code=exited, status=1/FAILURE)
+ Main PID: 6679 (code=exited, status=1/FAILURE)
+[root@localhost src]# /media/redis/redis-5.0.5/src/redis-cli -h 10.42.0.1 ping
+Could not connect to Redis at 10.42.0.1:6379: Connection refused
+[root@localhost src]#
+
+
+[root@localhost src]# journalctl -u redis -n 50
+-- Logs begin at Sun 2025-04-27 15:17:47 CST, end at Sun 2025-04-27 15:59:03 CST. --
+4月 27 15:58:42 localhost.localdomain systemd[1]: redis.service: Service RestartSec=5s expired, scheduling restart.
+4月 27 15:58:42 localhost.localdomain systemd[1]: redis.service: Scheduled restart job, restart counter is at 9.
+4月 27 15:58:42 localhost.localdomain systemd[1]: Stopped Redis In-Memory Data Store.
+4月 27 15:58:42 localhost.localdomain systemd[1]: Started Redis In-Memory Data Store.
+4月 27 15:58:42 localhost.localdomain redis-server[6709]: *** FATAL CONFIG FILE ERROR ***
+4月 27 15:58:42 localhost.localdomain redis-server[6709]: Reading the configuration file, at line 171
+4月 27 15:58:42 localhost.localdomain redis-server[6709]: >>> 'logfile "/media/redis/redis-log"'
+4月 27 15:58:42 localhost.localdomain redis-server[6709]: Can't open the log file: Is a directory
+4月 27 15:58:42 localhost.localdomain systemd[1]: redis.service: Main process exited, code=exited, status=1/FAILURE
+4月 27 15:58:42 localhost.localdomain systemd[1]: redis.service: Failed with result 'exit-code'.
+4月 27 15:58:48 localhost.localdomain systemd[1]: redis.service: Service RestartSec=5s expired, scheduling restart.
+4月 27 15:58:48 localhost.localdomain systemd[1]: redis.service: Scheduled restart job, restart counter is at 10.
+4月 27 15:58:48 localhost.localdomain systemd[1]: Stopped Redis In-Memory Data Store.
+4月 27 15:58:48 localhost.localdomain systemd[1]: Started Redis In-Memory Data Store.
+4月 27 15:58:48 localhost.localdomain redis-server[6713]: *** FATAL CONFIG FILE ERROR ***
+4月 27 15:58:48 localhost.localdomain redis-server[6713]: Reading the configuration file, at line 171
+4月 27 15:58:48 localhost.localdomain redis-server[6713]: >>> 'logfile "/media/redis/redis-log"'
+4月 27 15:58:48 localhost.localdomain redis-server[6713]: Can't open the log file: Is a directory
+4月 27 15:58:48 localhost.localdomain systemd[1]: redis.service: Main process exited, code=exited, status=1/FAILURE
+4月 27 15:58:48 localhost.localdomain systemd[1]: redis.service: Failed with result 'exit-code'.
+4月 27 15:58:53 localhost.localdomain systemd[1]: redis.service: Service RestartSec=5s expired, scheduling restart.
+4月 27 15:58:53 localhost.localdomain systemd[1]: redis.service: Scheduled restart job, restart counter is at 11.
+4月 27 15:58:53 localhost.localdomain systemd[1]: Stopped Redis In-Memory Data Store.
+4月 27 15:58:53 localhost.localdomain systemd[1]: Started Redis In-Memory Data Store.
+4月 27 15:58:53 localhost.localdomain redis-server[6715]: *** FATAL CONFIG FILE ERROR ***
+4月 27 15:58:53 localhost.localdomain redis-server[6715]: Reading the configuration file, at line 171
+4月 27 15:58:53 localhost.localdomain redis-server[6715]: >>> 'logfile "/media/redis/redis-log"'
+4月 27 15:58:53 localhost.localdomain redis-server[6715]: Can't open the log file: Is a directory
+4月 27 15:58:53 localhost.localdomain systemd[1]: redis.service: Main process exited, code=exited, status=1/FAILURE
+4月 27 15:58:53 localhost.localdomain systemd[1]: redis.service: Failed with result 'exit-code'.
+4月 27 15:58:58 localhost.localdomain systemd[1]: redis.service: Service RestartSec=5s expired, scheduling restart.
+4月 27 15:58:58 localhost.localdomain systemd[1]: redis.service: Scheduled restart job, restart counter is at 12.
+4月 27 15:58:58 localhost.localdomain systemd[1]: Stopped Redis In-Memory Data Store.
+4月 27 15:58:58 localhost.localdomain systemd[1]: Started Redis In-Memory Data Store.
+4月 27 15:58:58 localhost.localdomain redis-server[6717]: *** FATAL CONFIG FILE ERROR ***
+4月 27 15:58:58 localhost.localdomain redis-server[6717]: Reading the configuration file, at line 171
+4月 27 15:58:58 localhost.localdomain redis-server[6717]: >>> 'logfile "/media/redis/redis-log"'
+4月 27 15:58:58 localhost.localdomain redis-server[6717]: Can't open the log file: Is a directory
+lines 1-39
+
+
+[root@localhost ~]# mkdir -p /media/redis/redis-log/redis.log
+mkdir: 无法创建目录 “/media/redis/redis-log/redis.log”: 文件已存在
+[root@localhost ~]# systemctl restart redis
+[root@localhost ~]# systemctl status redis
+● redis.service - Redis In-Memory Data Store
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: active (running) since Sun 2025-04-27 16:04:52 CST; 2ms ago
+ Main PID: 7094 (redis-server)
+ Tasks: 1
+ Memory: 2.0M
+ CGroup: /system.slice/redis.service
+ └─7094 /media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf
+
+4月 27 16:04:52 localhost.localdomain systemd[1]: Started Redis In-Memory Data Store.
+[root@localhost ~]# /media/redis/redis-5.0.5/src/redis-cli ping
+PONG
+[root@localhost ~]# cd /media/tao_iot
+[root@localhost tao_iot]# java -jar haiwei-admin.jar
+Application Version: 3.8.7
+Spring Boot Version: 2.5.15
+////////////////////////////////////////////////////////////////////
+// _ooOoo_ //
+// o8888888o //
+// 88" . "88 //
+// (| ^_^ |) //
+// O\ = /O //
+// ____/`---'\____ //
+// .' \\| |// `. //
+// / \\||| : |||// \ //
+// / _||||| -:- |||||- \ //
+// | | \\\ - /// | | //
+// | \_| ''\---/'' | | //
+// \ .-\__ `-` ___/-. / //
+// ___`. .' /--.--\ `. . ___ //
+// ."" '< `.___\_<|>_/___.' >'"". //
+// | | : `- \`.;`\ _ /`;.`/ - ` : | | //
+// \ \ `-. \_ __\ /__ _/ .-` / / //
+// ========`-.____`-.___\_____/___.-`____.-'======== //
+// `=---=' //
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ //
+// 佛祖保佑 永不宕机 永无BUG //
+////////////////////////////////////////////////////////////////////
+16:05:38.380 [main] INFO c.o.RuoYiApplication - [logStarting,55] - Starting RuoYiApplication using Java 1.8.0_312 on localhost.localdomain with PID 7131 (/media/tao_iot/haiwei-admin.jar started by root in /media/tao_iot)
+16:05:38.378 [background-preinit] INFO o.h.v.i.util.Version - [,21] - HV000001: Hibernate Validator 6.2.5.Final
+16:05:38.384 [main] DEBUG c.o.RuoYiApplication - [logStarting,56] - Running with Spring Boot v2.5.15, Spring v5.3.33
+16:05:38.385 [main] INFO c.o.RuoYiApplication - [logStartupProfileInfo,686] - The following 1 profile is active: "druid"
+16:05:48.453 [main] INFO o.a.c.h.Http11NioProtocol - [log,173] - Initializing ProtocolHandler ["http-nio-8020"]
+16:05:48.455 [main] INFO o.a.c.c.StandardService - [log,173] - Starting service [Tomcat]
+16:05:48.456 [main] INFO o.a.c.c.StandardEngine - [log,173] - Starting Servlet engine: [Apache Tomcat/9.0.75]
+16:05:48.735 [main] INFO o.a.c.c.C.[.[.[/] - [log,173] - Initializing Spring embedded WebApplicationContext
+16:05:50.746 [main] INFO org.redisson.Version - [logVersion,41] - Redisson 3.16.4
+16:05:51.691 [redisson-netty-2-12] WARN i.n.u.c.DefaultPromise - [notifyListener0,593] - An exception was thrown by org.redisson.misc.RedissonPromise$$Lambda$724/322112198.operationComplete()
+java.util.concurrent.RejectedExecutionException: event executor terminated
+ at io.netty.util.concurrent.SingleThreadEventExecutor.reject(SingleThreadEventExecutor.java:934)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.offerTask(SingleThreadEventExecutor.java:351)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.addTask(SingleThreadEventExecutor.java:344)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.execute(SingleThreadEventExecutor.java:836)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.execute0(SingleThreadEventExecutor.java:827)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.execute(SingleThreadEventExecutor.java:817)
+ at io.netty.util.concurrent.AbstractEventExecutorGroup.execute(AbstractEventExecutorGroup.java:115)
+ at org.redisson.client.RedisClient$3.lambda$operationComplete$0(RedisClient.java:295)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:557)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.client.handler.BaseConnectionHandler.lambda$channelActive$1(BaseConnectionHandler.java:113)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.client.protocol.CommandData.tryFailure(CommandData.java:78)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:370)
+ at org.redisson.client.handler.CommandPubSubDecoder.decodeCommand(CommandPubSubDecoder.java:96)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:137)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:113)
+ at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:529)
+ at io.netty.handler.codec.ReplayingDecoder.callDecode(ReplayingDecoder.java:366)
+ at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:290)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
+ at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
+ at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
+ at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
+ at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
+ at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
+ at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
+ at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
+ at java.lang.Thread.run(Thread.java:748)
+16:05:51.698 [main] ERROR o.s.b.w.e.t.TomcatStarter - [onStartup,61] - Error starting Tomcat context. Exception: org.springframework.beans.factory.UnsatisfiedDependencyException. Message: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+16:05:51.778 [main] INFO o.a.c.c.StandardService - [log,173] - Stopping service [Tomcat]
+16:05:52.024 [main] WARN o.s.b.w.s.c.AnnotationConfigServletWebServerApplicationContext - [refresh,599] - Exception encountered during context initialization - cancelling refresh attempt: org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+16:05:52.097 [main] ERROR o.s.b.SpringApplication - [reportFailure,870] - Application run failed
+org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:163)
+ at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:585)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145)
+ at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:780)
+ at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:453)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:343)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1370)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1359)
+ at com.os.RuoYiApplication.main(RuoYiApplication.java:18)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:49)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:108)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:58)
+ at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:88)
+Caused by: org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:142)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.(TomcatWebServer.java:104)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getTomcatWebServer(TomcatServletWebServerFactory.java:456)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getWebServer(TomcatServletWebServerFactory.java:204)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.createWebServer(ServletWebServerApplicationContext.java:182)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:160)
+ ... 16 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:214)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.getOrderedBeansOfType(ServletContextInitializerBeans.java:212)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:175)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:170)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAdaptableBeans(ServletContextInitializerBeans.java:155)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.(ServletContextInitializerBeans.java:87)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.getServletContextInitializerBeans(ServletWebServerApplicationContext.java:260)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.selfInitialize(ServletWebServerApplicationContext.java:234)
+ at org.springframework.boot.web.embedded.tomcat.TomcatStarter.onStartup(TomcatStarter.java:53)
+ at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:4936)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardHost.startInternal(StandardHost.java:795)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardEngine.startInternal(StandardEngine.java:249)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardService.startInternal(StandardService.java:428)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardServer.startInternal(StandardServer.java:914)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.startup.Tomcat.start(Tomcat.java:486)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:123)
+ ... 21 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 63 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 77 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 91 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 104 common frames omitted
+Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:646)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:477)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 118 common frames omitted
+Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:641)
+ ... 132 common frames omitted
+Caused by: org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 10.42.0.1/10.42.0.1:6379
+ at org.redisson.connection.pool.ConnectionPool$1.lambda$run$0(ConnectionPool.java:158)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:557)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.connection.pool.ConnectionPool.promiseFailure(ConnectionPool.java:313)
+ at org.redisson.connection.pool.ConnectionPool.lambda$createConnection$3(ConnectionPool.java:279)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.client.RedisClient$2$1.run(RedisClient.java:242)
+ at io.netty.util.concurrent.AbstractEventExecutor.runTask(AbstractEventExecutor.java:174)
+ at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:167)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:470)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:569)
+ at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
+ at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
+ at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
+ at java.lang.Thread.run(Thread.java:748)
+Caused by: java.io.IOException: 断开的管道
+ at sun.nio.ch.FileDispatcherImpl.write0(Native Method)
+ at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:47)
+ at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:93)
+ at sun.nio.ch.IOUtil.write(IOUtil.java:51)
+ at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:470)
+ at io.netty.channel.socket.nio.NioSocketChannel.doWrite(NioSocketChannel.java:415)
+ at io.netty.channel.AbstractChannel$AbstractUnsafe.flush0(AbstractChannel.java:931)
+ at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.flush0(AbstractNioChannel.java:354)
+ at io.netty.channel.AbstractChannel$AbstractUnsafe.flush(AbstractChannel.java:895)
+ at io.netty.channel.DefaultChannelPipeline$HeadContext.flush(DefaultChannelPipeline.java:1372)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeFlush0(AbstractChannelHandlerContext.java:921)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeFlush(AbstractChannelHandlerContext.java:907)
+ at io.netty.channel.AbstractChannelHandlerContext.flush(AbstractChannelHandlerContext.java:893)
+ at io.netty.channel.ChannelOutboundHandlerAdapter.flush(ChannelOutboundHandlerAdapter.java:125)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeFlush0(AbstractChannelHandlerContext.java:925)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:941)
+ at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:966)
+ at io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:934)
+ at org.redisson.client.handler.CommandsQueue.write(CommandsQueue.java:84)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:879)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:940)
+ at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:966)
+ at io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:934)
+ at io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:984)
+ at io.netty.channel.DefaultChannelPipeline.writeAndFlush(DefaultChannelPipeline.java:1025)
+ at io.netty.channel.AbstractChannel.writeAndFlush(AbstractChannel.java:306)
+ at org.redisson.client.RedisConnection.send(RedisConnection.java:213)
+ at org.redisson.client.RedisConnection.async(RedisConnection.java:259)
+ at org.redisson.client.RedisConnection.async(RedisConnection.java:231)
+ at org.redisson.client.RedisConnection.async(RedisConnection.java:227)
+ at org.redisson.client.handler.BaseConnectionHandler.channelActive(BaseConnectionHandler.java:77)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:262)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:238)
+ at io.netty.channel.AbstractChannelHandlerContext.fireChannelActive(AbstractChannelHandlerContext.java:231)
+ at io.netty.channel.DefaultChannelPipeline$HeadContext.channelActive(DefaultChannelPipeline.java:1398)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:258)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:238)
+ at io.netty.channel.DefaultChannelPipeline.fireChannelActive(DefaultChannelPipeline.java:895)
+ at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.fulfillConnectPromise(AbstractNioChannel.java:305)
+ at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:335)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:776)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
+ ... 4 common frames omitted
+[root@localhost tao_iot]#
+[root@localhost tao_iot]# telnet 10.42.0.1 6379
+Trying 10.42.0.1...
+Connected to 10.42.0.1.
+Escape character is '^]'.
+-DENIED Redis is running in protected mode because protected mode is enabled, no bind address was specified, no authentication password is requested to clients. In this mode connections are only accepted from the loopback interface. If you want to connect from external computers to Redis you may adopt one of the following solutions: 1) Just disable protected mode sending the command 'CONFIG SET protected-mode no' from the loopback interface by connecting to Redis from the same host the server is running, however MAKE SURE Redis is not publicly accessible from internet if you do so. Use CONFIG REWRITE to make this change permanent. 2) Alternatively you can just disable the protected mode by editing the Redis configuration file, and setting the protected mode option to 'no', and then restarting the server. 3) If you started the server manually just for testing, restart it with the '--protected-mode no' option. 4) Setup a bind address or an authentication password. NOTE: You only need to do one of the above things in order for the server to start accepting connections from the outside.
+Connection closed by foreign host.
+[root@localhost tao_iot]#
+[root@localhost tao_iot]# sudo iptables -L -n
+Chain INPUT (policy ACCEPT)
+target prot opt source destination
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0 ctstate RELATED,ESTABLISHED
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0
+INPUT_direct all -- 0.0.0.0/0 0.0.0.0/0
+INPUT_ZONES all -- 0.0.0.0/0 0.0.0.0/0
+DROP all -- 0.0.0.0/0 0.0.0.0/0 ctstate INVALID
+REJECT all -- 0.0.0.0/0 0.0.0.0/0 reject-with icmp-host-prohibited
+
+Chain FORWARD (policy ACCEPT)
+target prot opt source destination
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0 ctstate RELATED,ESTABLISHED
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0
+FORWARD_direct all -- 0.0.0.0/0 0.0.0.0/0
+FORWARD_IN_ZONES all -- 0.0.0.0/0 0.0.0.0/0
+FORWARD_OUT_ZONES all -- 0.0.0.0/0 0.0.0.0/0
+DROP all -- 0.0.0.0/0 0.0.0.0/0 ctstate INVALID
+REJECT all -- 0.0.0.0/0 0.0.0.0/0 reject-with icmp-host-prohibited
+
+Chain OUTPUT (policy ACCEPT)
+target prot opt source destination
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0
+OUTPUT_direct all -- 0.0.0.0/0 0.0.0.0/0
+
+Chain FORWARD_IN_ZONES (1 references)
+target prot opt source destination
+FWDI_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+FWDI_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+
+Chain FORWARD_OUT_ZONES (1 references)
+target prot opt source destination
+FWDO_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+FWDO_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+
+Chain FORWARD_direct (1 references)
+target prot opt source destination
+
+Chain FWDI_public (2 references)
+target prot opt source destination
+FWDI_public_log all -- 0.0.0.0/0 0.0.0.0/0
+FWDI_public_deny all -- 0.0.0.0/0 0.0.0.0/0
+FWDI_public_allow all -- 0.0.0.0/0 0.0.0.0/0
+ACCEPT icmp -- 0.0.0.0/0 0.0.0.0/0
+
+Chain FWDI_public_allow (1 references)
+target prot opt source destination
+
+Chain FWDI_public_deny (1 references)
+target prot opt source destination
+
+Chain FWDI_public_log (1 references)
+target prot opt source destination
+
+Chain FWDO_public (2 references)
+target prot opt source destination
+FWDO_public_log all -- 0.0.0.0/0 0.0.0.0/0
+FWDO_public_deny all -- 0.0.0.0/0 0.0.0.0/0
+FWDO_public_allow all -- 0.0.0.0/0 0.0.0.0/0
+
+Chain FWDO_public_allow (1 references)
+target prot opt source destination
+
+Chain FWDO_public_deny (1 references)
+target prot opt source destination
+
+Chain FWDO_public_log (1 references)
+target prot opt source destination
+
+Chain INPUT_ZONES (1 references)
+target prot opt source destination
+IN_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+IN_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+
+Chain INPUT_direct (1 references)
+target prot opt source destination
+
+Chain IN_public (2 references)
+target prot opt source destination
+IN_public_log all -- 0.0.0.0/0 0.0.0.0/0
+IN_public_deny all -- 0.0.0.0/0 0.0.0.0/0
+IN_public_allow all -- 0.0.0.0/0 0.0.0.0/0
+ACCEPT icmp -- 0.0.0.0/0 0.0.0.0/0
+
+Chain IN_public_allow (1 references)
+target prot opt source destination
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:22 ctstate NEW,UNTRACKED
+ACCEPT udp -- 0.0.0.0/0 224.0.0.251 udp dpt:5353 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:9090 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:2379 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:8084 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:4000 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:7001 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:6379 ctstate NEW,UNTRACKED
+
+Chain IN_public_deny (1 references)
+target prot opt source destination
+
+Chain IN_public_log (1 references)
+target prot opt source destination
+
+Chain OUTPUT_direct (1 references)
+target prot opt source destination
+[root@localhost tao_iot]#
+[root@localhost tao_iot]# redis-cli -h 127.0.0.1 -p 6379 -a haiwei@123
+Warning: Using a password with '-a' or '-u' option on the command line interface may not be safe.
+127.0.0.1:6379> select 7
+OK
+127.0.0.1:6379[7]> ping
+PONG
+127.0.0.1:6379[7]> redis-cli -h 127.0.0.1 -p 6379
+(error) ERR unknown command `redis-cli`, with args beginning with: `-h`, `127.0.0.1`, `-p`, `6379`,
+127.0.0.1:6379[7]>
+[root@localhost tao_iot]# redis-cli -h 127.0.0.1 -p 6379
+127.0.0.1:6379> select 7
+OK
+127.0.0.1:6379[7]> ping
+PONG
+127.0.0.1:6379[7]>
+[root@localhost tao_iot]# sudo iptables -L -n
+Chain INPUT (policy ACCEPT)
+target prot opt source destination
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0 ctstate RELATED,ESTABLISHED
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0
+INPUT_direct all -- 0.0.0.0/0 0.0.0.0/0
+INPUT_ZONES all -- 0.0.0.0/0 0.0.0.0/0
+DROP all -- 0.0.0.0/0 0.0.0.0/0 ctstate INVALID
+REJECT all -- 0.0.0.0/0 0.0.0.0/0 reject-with icmp-host-prohibited
+
+Chain FORWARD (policy ACCEPT)
+target prot opt source destination
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0 ctstate RELATED,ESTABLISHED
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0
+FORWARD_direct all -- 0.0.0.0/0 0.0.0.0/0
+FORWARD_IN_ZONES all -- 0.0.0.0/0 0.0.0.0/0
+FORWARD_OUT_ZONES all -- 0.0.0.0/0 0.0.0.0/0
+DROP all -- 0.0.0.0/0 0.0.0.0/0 ctstate INVALID
+REJECT all -- 0.0.0.0/0 0.0.0.0/0 reject-with icmp-host-prohibited
+
+Chain OUTPUT (policy ACCEPT)
+target prot opt source destination
+ACCEPT all -- 0.0.0.0/0 0.0.0.0/0
+OUTPUT_direct all -- 0.0.0.0/0 0.0.0.0/0
+
+Chain FORWARD_IN_ZONES (1 references)
+target prot opt source destination
+FWDI_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+FWDI_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+
+Chain FORWARD_OUT_ZONES (1 references)
+target prot opt source destination
+FWDO_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+FWDO_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+
+Chain FORWARD_direct (1 references)
+target prot opt source destination
+
+Chain FWDI_public (2 references)
+target prot opt source destination
+FWDI_public_log all -- 0.0.0.0/0 0.0.0.0/0
+FWDI_public_deny all -- 0.0.0.0/0 0.0.0.0/0
+FWDI_public_allow all -- 0.0.0.0/0 0.0.0.0/0
+ACCEPT icmp -- 0.0.0.0/0 0.0.0.0/0
+
+Chain FWDI_public_allow (1 references)
+target prot opt source destination
+
+Chain FWDI_public_deny (1 references)
+target prot opt source destination
+
+Chain FWDI_public_log (1 references)
+target prot opt source destination
+
+Chain FWDO_public (2 references)
+target prot opt source destination
+FWDO_public_log all -- 0.0.0.0/0 0.0.0.0/0
+FWDO_public_deny all -- 0.0.0.0/0 0.0.0.0/0
+FWDO_public_allow all -- 0.0.0.0/0 0.0.0.0/0
+
+Chain FWDO_public_allow (1 references)
+target prot opt source destination
+
+Chain FWDO_public_deny (1 references)
+target prot opt source destination
+
+Chain FWDO_public_log (1 references)
+target prot opt source destination
+
+Chain INPUT_ZONES (1 references)
+target prot opt source destination
+IN_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+IN_public all -- 0.0.0.0/0 0.0.0.0/0 [goto]
+
+Chain INPUT_direct (1 references)
+target prot opt source destination
+
+Chain IN_public (2 references)
+target prot opt source destination
+IN_public_log all -- 0.0.0.0/0 0.0.0.0/0
+IN_public_deny all -- 0.0.0.0/0 0.0.0.0/0
+IN_public_allow all -- 0.0.0.0/0 0.0.0.0/0
+ACCEPT icmp -- 0.0.0.0/0 0.0.0.0/0
+
+Chain IN_public_allow (1 references)
+target prot opt source destination
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:22 ctstate NEW,UNTRACKED
+ACCEPT udp -- 0.0.0.0/0 224.0.0.251 udp dpt:5353 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:9090 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:2379 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:8084 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:4000 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:7001 ctstate NEW,UNTRACKED
+ACCEPT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:6379 ctstate NEW,UNTRACKED
+
+Chain IN_public_deny (1 references)
+target prot opt source destination
+
+Chain IN_public_log (1 references)
+target prot opt source destination
+
+Chain OUTPUT_direct (1 references)
+target prot opt source destination
+[root@localhost tao_iot]# java -jar haiwei-admin.jar
+Application Version: 3.8.7
+Spring Boot Version: 2.5.15
+////////////////////////////////////////////////////////////////////
+// _ooOoo_ //
+// o8888888o //
+// 88" . "88 //
+// (| ^_^ |) //
+// O\ = /O //
+// ____/`---'\____ //
+// .' \\| |// `. //
+// / \\||| : |||// \ //
+// / _||||| -:- |||||- \ //
+// | | \\\ - /// | | //
+// | \_| ''\---/'' | | //
+// \ .-\__ `-` ___/-. / //
+// ___`. .' /--.--\ `. . ___ //
+// ."" '< `.___\_<|>_/___.' >'"". //
+// | | : `- \`.;`\ _ /`;.`/ - ` : | | //
+// \ \ `-. \_ __\ /__ _/ .-` / / //
+// ========`-.____`-.___\_____/___.-`____.-'======== //
+// `=---=' //
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ //
+// 佛祖保佑 永不宕机 永无BUG //
+////////////////////////////////////////////////////////////////////
+16:22:12.677 [main] INFO c.o.RuoYiApplication - [logStarting,55] - Starting RuoYiApplication using Java 1.8.0_312 on localhost.localdomain with PID 8642 (/media/tao_iot/haiwei-admin.jar started by root in /media/tao_iot)
+16:22:12.684 [main] DEBUG c.o.RuoYiApplication - [logStarting,56] - Running with Spring Boot v2.5.15, Spring v5.3.33
+16:22:12.685 [main] INFO c.o.RuoYiApplication - [logStartupProfileInfo,686] - The following 1 profile is active: "druid"
+16:22:12.688 [background-preinit] INFO o.h.v.i.util.Version - [,21] - HV000001: Hibernate Validator 6.2.5.Final
+16:22:22.898 [main] INFO o.a.c.h.Http11NioProtocol - [log,173] - Initializing ProtocolHandler ["http-nio-8020"]
+16:22:22.900 [main] INFO o.a.c.c.StandardService - [log,173] - Starting service [Tomcat]
+16:22:22.902 [main] INFO o.a.c.c.StandardEngine - [log,173] - Starting Servlet engine: [Apache Tomcat/9.0.75]
+16:22:23.181 [main] INFO o.a.c.c.C.[.[.[/] - [log,173] - Initializing Spring embedded WebApplicationContext
+16:22:25.330 [main] INFO org.redisson.Version - [logVersion,41] - Redisson 3.16.4
+16:22:26.279 [main] ERROR o.s.b.w.e.t.TomcatStarter - [onStartup,61] - Error starting Tomcat context. Exception: org.springframework.beans.factory.UnsatisfiedDependencyException. Message: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+16:22:26.358 [main] INFO o.a.c.c.StandardService - [log,173] - Stopping service [Tomcat]
+16:22:26.604 [main] WARN o.s.b.w.s.c.AnnotationConfigServletWebServerApplicationContext - [refresh,599] - Exception encountered during context initialization - cancelling refresh attempt: org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+16:22:26.678 [main] ERROR o.s.b.SpringApplication - [reportFailure,870] - Application run failed
+org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:163)
+ at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:585)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145)
+ at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:780)
+ at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:453)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:343)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1370)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1359)
+ at com.os.RuoYiApplication.main(RuoYiApplication.java:18)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:49)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:108)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:58)
+ at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:88)
+Caused by: org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:142)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.(TomcatWebServer.java:104)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getTomcatWebServer(TomcatServletWebServerFactory.java:456)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getWebServer(TomcatServletWebServerFactory.java:204)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.createWebServer(ServletWebServerApplicationContext.java:182)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:160)
+ ... 16 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:214)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.getOrderedBeansOfType(ServletContextInitializerBeans.java:212)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:175)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:170)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAdaptableBeans(ServletContextInitializerBeans.java:155)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.(ServletContextInitializerBeans.java:87)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.getServletContextInitializerBeans(ServletWebServerApplicationContext.java:260)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.selfInitialize(ServletWebServerApplicationContext.java:234)
+ at org.springframework.boot.web.embedded.tomcat.TomcatStarter.onStartup(TomcatStarter.java:53)
+ at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:4936)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardHost.startInternal(StandardHost.java:795)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardEngine.startInternal(StandardEngine.java:249)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardService.startInternal(StandardService.java:428)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardServer.startInternal(StandardServer.java:914)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.startup.Tomcat.start(Tomcat.java:486)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:123)
+ ... 21 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 63 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 77 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 91 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 104 common frames omitted
+Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:646)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:477)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 118 common frames omitted
+Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:641)
+ ... 132 common frames omitted
+Caused by: org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.redisson.connection.pool.ConnectionPool$1.lambda$run$0(ConnectionPool.java:158)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:557)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.connection.pool.ConnectionPool.promiseFailure(ConnectionPool.java:313)
+ at org.redisson.connection.pool.ConnectionPool.lambda$createConnection$3(ConnectionPool.java:279)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.client.RedisClient$2$1.run(RedisClient.java:242)
+ at io.netty.util.concurrent.AbstractEventExecutor.runTask(AbstractEventExecutor.java:174)
+ at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:167)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:470)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:569)
+ at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
+ at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
+ at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
+ at java.lang.Thread.run(Thread.java:748)
+Caused by: org.redisson.client.RedisException: ERR Client sent AUTH, but no password is set. channel: [id: 0x75f4890e, L:/127.0.0.1:56236 - R:127.0.0.1/127.0.0.1:6379] command: (AUTH), params: (password masked)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:370)
+ at org.redisson.client.handler.CommandDecoder.decodeCommand(CommandDecoder.java:198)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:137)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:113)
+ at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:529)
+ at io.netty.handler.codec.ReplayingDecoder.callDecode(ReplayingDecoder.java:366)
+ at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:290)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
+ at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
+ at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
+ at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
+ at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
+ ... 4 common frames omitted
+[root@localhost tao_iot]#
+
+
+[root@localhost tao_iot]# java -jar haiwei-admin.jar --DEBUG
+Application Version: 3.8.7
+Spring Boot Version: 2.5.15
+////////////////////////////////////////////////////////////////////
+// _ooOoo_ //
+// o8888888o //
+// 88" . "88 //
+// (| ^_^ |) //
+// O\ = /O //
+// ____/`---'\____ //
+// .' \\| |// `. //
+// / \\||| : |||// \ //
+// / _||||| -:- |||||- \ //
+// | | \\\ - /// | | //
+// | \_| ''\---/'' | | //
+// \ .-\__ `-` ___/-. / //
+// ___`. .' /--.--\ `. . ___ //
+// ."" '< `.___\_<|>_/___.' >'"". //
+// | | : `- \`.;`\ _ /`;.`/ - ` : | | //
+// \ \ `-. \_ __\ /__ _/ .-` / / //
+// ========`-.____`-.___\_____/___.-`____.-'======== //
+// `=---=' //
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ //
+// 佛祖保佑 永不宕机 永无BUG //
+////////////////////////////////////////////////////////////////////
+16:28:20.819 [main] INFO c.o.RuoYiApplication - [logStarting,55] - Starting RuoYiApplication using Java 1.8.0_312 on localhost.localdomain with PID 9077 (/media/tao_iot/haiwei-admin.jar started by root in /media/tao_iot)
+16:28:20.822 [background-preinit] INFO o.h.v.i.util.Version - [,21] - HV000001: Hibernate Validator 6.2.5.Final
+16:28:20.826 [main] DEBUG c.o.RuoYiApplication - [logStarting,56] - Running with Spring Boot v2.5.15, Spring v5.3.33
+16:28:20.828 [main] INFO c.o.RuoYiApplication - [logStartupProfileInfo,686] - The following 1 profile is active: "druid"
+16:28:20.829 [main] DEBUG o.s.b.SpringApplication - [load,713] - Loading source class com.os.RuoYiApplication
+16:28:20.977 [main] DEBUG o.s.b.w.s.c.AnnotationConfigServletWebServerApplicationContext - [prepareRefresh,637] - Refreshing org.springframework.boot.web.servlet.context.AnnotationConfigServletWebServerApplicationContext@74650e52
+16:28:23.676 [main] DEBUG o.s.b.c.p.s.ConfigurationPropertySourcesPropertyResolver$DefaultResolver - [logKeyFound,115] - Found key 'spring.datasource.druid.statViewServlet.enabled' in PropertySource 'Config resource 'class path resource [application-druid.yml]' via location 'optional:classpath:/'' with value of type Boolean
+16:28:24.797 [main] DEBUG o.s.b.a.AutoConfigurationPackages - [get,196] - @EnableAutoConfiguration was declared on a class in the package 'com.os'. Automatic @Repository and @Entity scanning is enabled.
+16:28:29.239 [main] DEBUG o.s.b.w.e.t.TomcatServletWebServerFactory - [getArchiveFileDocumentRoot,81] - Code archive: /media/tao_iot/haiwei-admin.jar
+16:28:29.240 [main] DEBUG o.s.b.w.e.t.TomcatServletWebServerFactory - [getExplodedWarFileDocumentRoot,125] - Code archive: /media/tao_iot/haiwei-admin.jar
+16:28:29.240 [main] DEBUG o.s.b.w.e.t.TomcatServletWebServerFactory - [logNoDocumentRoots,149] - None of the document roots [src/main/webapp, public, static] point to a directory and will be ignored.
+16:28:29.309 [main] INFO o.s.b.w.e.t.TomcatWebServer - [initialize,108] - Tomcat initialized with port(s): 8020 (http)
+16:28:29.341 [main] INFO o.a.c.h.Http11NioProtocol - [log,173] - Initializing ProtocolHandler ["http-nio-8020"]
+16:28:29.342 [main] INFO o.a.c.c.StandardService - [log,173] - Starting service [Tomcat]
+16:28:29.343 [main] INFO o.a.c.c.StandardEngine - [log,173] - Starting Servlet engine: [Apache Tomcat/9.0.75]
+16:28:29.535 [main] INFO o.a.c.c.C.[.[.[/] - [log,173] - Initializing Spring embedded WebApplicationContext
+16:28:29.536 [main] DEBUG o.s.b.w.s.c.ServletWebServerApplicationContext - [prepareWebApplicationContext,284] - Published root WebApplicationContext as ServletContext attribute with name [org.springframework.web.context.WebApplicationContext.ROOT]
+16:28:29.536 [main] INFO o.s.b.w.s.c.ServletWebServerApplicationContext - [prepareWebApplicationContext,290] - Root WebApplicationContext: initialization completed in 8559 ms
+16:28:29.617 [main] DEBUG o.s.b.c.p.s.ConfigurationPropertySourcesPropertyResolver$DefaultResolver - [logKeyFound,115] - Found key 'xss.urlPatterns' in PropertySource 'Config resource 'class path resource [application.yml]' via location 'optional:classpath:/'' with value of type String
+16:28:30.311 [main] DEBUG o.s.b.c.p.s.ConfigurationPropertySourcesPropertyResolver$DefaultResolver - [logKeyFound,115] - Found key 'token.expireTime' in PropertySource 'Config resource 'class path resource [application.yml]' via location 'optional:classpath:/'' with value of type Integer
+16:28:31.034 [main] INFO org.redisson.Version - [logVersion,41] - Redisson 3.16.4
+16:28:31.963 [main] ERROR o.s.b.w.e.t.TomcatStarter - [onStartup,61] - Error starting Tomcat context. Exception: org.springframework.beans.factory.UnsatisfiedDependencyException. Message: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+16:28:32.040 [main] INFO o.a.c.c.StandardService - [log,173] - Stopping service [Tomcat]
+16:28:32.286 [main] WARN o.s.b.w.s.c.AnnotationConfigServletWebServerApplicationContext - [refresh,599] - Exception encountered during context initialization - cancelling refresh attempt: org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+16:28:32.365 [main] DEBUG o.s.b.a.l.ConditionEvaluationReportLoggingListener - [logAutoConfigurationReport,126] -
+
+
+============================
+CONDITIONS EVALUATION REPORT
+============================
+
+
+Positive matches:
+-----------------
+
+ AopAutoConfiguration matched:
+ - @ConditionalOnProperty (spring.aop.auto=true) matched (OnPropertyCondition)
+
+ AopAutoConfiguration.AspectJAutoProxyingConfiguration matched:
+ - @ConditionalOnClass found required class 'org.aspectj.weaver.Advice' (OnClassCondition)
+
+ AopAutoConfiguration.AspectJAutoProxyingConfiguration.CglibAutoProxyConfiguration matched:
+ - @ConditionalOnProperty (spring.aop.proxy-target-class=true) matched (OnPropertyCondition)
+
+ BeanValidatorPluginsConfiguration matched:
+ - @ConditionalOnClass found required class 'javax.validation.executable.ExecutableValidator' (OnClassCondition)
+
+ CacheAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.cache.CacheManager' (OnClassCondition)
+ - @ConditionalOnBean (types: org.springframework.cache.interceptor.CacheAspectSupport; SearchStrategy: all) found bean 'cacheInterceptor'; @ConditionalOnMissingBean (names: cacheResolver types: org.springframework.cache.CacheManager; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ CacheAutoConfiguration#cacheManagerCustomizers matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.autoconfigure.cache.CacheManagerCustomizers; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ CacheMeterBinderProvidersConfiguration matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.instrument.binder.MeterBinder' (OnClassCondition)
+
+ CacheMeterBinderProvidersConfiguration.JCacheCacheMeterBinderProviderConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.cache.jcache.JCacheCache', 'javax.cache.CacheManager' (OnClassCondition)
+
+ CacheMeterBinderProvidersConfiguration.RedisCacheMeterBinderProviderConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.data.redis.cache.RedisCache' (OnClassCondition)
+
+ CacheMetricsAutoConfiguration matched:
+ - @ConditionalOnBean (types: org.springframework.cache.CacheManager; SearchStrategy: all) found bean 'cacheManager' (OnBeanCondition)
+
+ CacheMetricsRegistrarConfiguration matched:
+ - @ConditionalOnBean (types: org.springframework.boot.actuate.metrics.cache.CacheMeterBinderProvider,io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found beans 'redissonCacheMeterBinderProvider', 'simpleMeterRegistry', 'redisCacheMeterBinderProvider', 'jCacheCacheMeterBinderProvider' (OnBeanCondition)
+
+ CompositeMeterRegistryAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.instrument.composite.CompositeMeterRegistry' (OnClassCondition)
+
+ DataSourceHealthContributorAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.jdbc.core.JdbcTemplate', 'org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource' (OnClassCondition)
+ - @ConditionalOnEnabledHealthIndicator management.health.defaults.enabled is considered true (OnEnabledHealthIndicatorCondition)
+ - @ConditionalOnBean (types: javax.sql.DataSource; SearchStrategy: all) found beans 'masterDataSource', 'dynamicDataSource' (OnBeanCondition)
+
+ DataSourceHealthContributorAutoConfiguration#dbHealthContributor matched:
+ - @ConditionalOnMissingBean (names: dbHealthIndicator,dbHealthContributor; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DataSourceInitializationConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.jdbc.datasource.init.DatabasePopulator' (OnClassCondition)
+ - @ConditionalOnSingleCandidate (types: javax.sql.DataSource; SearchStrategy: all) found a single primary bean 'dynamicDataSource' from beans 'masterDataSource', 'dynamicDataSource'; @ConditionalOnMissingBean (types: org.springframework.boot.sql.init.AbstractScriptDatabaseInitializer; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DataSourcePoolMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.sql.DataSource', 'io.micrometer.core.instrument.MeterRegistry' (OnClassCondition)
+ - @ConditionalOnBean (types: javax.sql.DataSource,io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found beans 'masterDataSource', 'dynamicDataSource', 'simpleMeterRegistry' (OnBeanCondition)
+
+ DataSourcePoolMetricsAutoConfiguration.HikariDataSourceMetricsConfiguration matched:
+ - @ConditionalOnClass found required class 'com.zaxxer.hikari.HikariDataSource' (OnClassCondition)
+
+ DataSourceTransactionManagerAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.jdbc.core.JdbcTemplate', 'org.springframework.transaction.TransactionManager' (OnClassCondition)
+
+ DataSourceTransactionManagerAutoConfiguration.JdbcTransactionManagerConfiguration matched:
+ - @ConditionalOnSingleCandidate (types: javax.sql.DataSource; SearchStrategy: all) found a single primary bean 'dynamicDataSource' from beans 'masterDataSource', 'dynamicDataSource' (OnBeanCondition)
+
+ DataSourceTransactionManagerAutoConfiguration.JdbcTransactionManagerConfiguration#transactionManager matched:
+ - @ConditionalOnMissingBean (types: org.springframework.transaction.TransactionManager; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DiskSpaceHealthContributorAutoConfiguration matched:
+ - @ConditionalOnEnabledHealthIndicator management.health.defaults.enabled is considered true (OnEnabledHealthIndicatorCondition)
+
+ DiskSpaceHealthContributorAutoConfiguration#diskSpaceHealthIndicator matched:
+ - @ConditionalOnMissingBean (names: diskSpaceHealthIndicator; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DispatcherServletAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ DispatcherServletAutoConfiguration.DispatcherServletConfiguration matched:
+ - @ConditionalOnClass found required class 'javax.servlet.ServletRegistration' (OnClassCondition)
+ - Default DispatcherServlet did not find dispatcher servlet beans (DispatcherServletAutoConfiguration.DefaultDispatcherServletCondition)
+
+ DispatcherServletAutoConfiguration.DispatcherServletRegistrationConfiguration matched:
+ - @ConditionalOnClass found required class 'javax.servlet.ServletRegistration' (OnClassCondition)
+ - DispatcherServlet Registration did not find servlet registration bean (DispatcherServletAutoConfiguration.DispatcherServletRegistrationCondition)
+
+ DispatcherServletAutoConfiguration.DispatcherServletRegistrationConfiguration#dispatcherServletRegistration matched:
+ - @ConditionalOnBean (names: dispatcherServlet types: org.springframework.web.servlet.DispatcherServlet; SearchStrategy: all) found bean 'dispatcherServlet' (OnBeanCondition)
+
+ DruidConfig#removeDruidFilterRegistrationBean matched:
+ - @ConditionalOnProperty (spring.datasource.druid.statViewServlet.enabled=true) matched (OnPropertyCondition)
+
+ DruidDataSourceAutoConfigure matched:
+ - @ConditionalOnClass found required class 'com.alibaba.druid.pool.DruidDataSource' (OnClassCondition)
+
+ DruidDynamicDataSourceConfiguration matched:
+ - @ConditionalOnClass found required class 'com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure' (OnClassCondition)
+
+ DruidFilterConfiguration#statFilter matched:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.stat.enabled) matched (OnPropertyCondition)
+ - @ConditionalOnMissingBean (types: com.alibaba.druid.filter.stat.StatFilter; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DruidStatViewServletConfiguration matched:
+ - @ConditionalOnWebApplication (required) found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (spring.datasource.druid.stat-view-servlet.enabled=true) matched (OnPropertyCondition)
+
+ DruidWebStatFilterConfiguration matched:
+ - @ConditionalOnWebApplication (required) found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (spring.datasource.druid.web-stat-filter.enabled=true) matched (OnPropertyCondition)
+
+ DynamicDataSourceAutoConfiguration matched:
+ - @ConditionalOnProperty (spring.datasource.dynamic.enabled=true) matched (OnPropertyCondition)
+
+ DynamicDataSourceAutoConfiguration#dataSourceInitEvent matched:
+ - @ConditionalOnMissingBean (types: com.baomidou.dynamic.datasource.event.DataSourceInitEvent; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DynamicDataSourceAutoConfiguration#dsProcessor matched:
+ - @ConditionalOnMissingBean (types: com.baomidou.dynamic.datasource.processor.DsProcessor; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DynamicDataSourceAutoConfiguration#dynamicDatasourceAnnotationAdvisor matched:
+ - @ConditionalOnProperty (spring.datasource.dynamic.aop.enabled=true) matched (OnPropertyCondition)
+
+ DynamicDataSourceAutoConfiguration#dynamicTransactionAdvisor matched:
+ - @ConditionalOnProperty (spring.datasource.dynamic.seata=false) matched (OnPropertyCondition)
+
+ DynamicDataSourceCreatorAutoConfiguration#dataSourceCreator matched:
+ - @ConditionalOnMissingBean (types: com.baomidou.dynamic.datasource.creator.DefaultDataSourceCreator; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ DynamicDataSourceCreatorAutoConfiguration.DruidDataSourceCreatorConfiguration matched:
+ - @ConditionalOnClass found required class 'com.alibaba.druid.pool.DruidDataSource' (OnClassCondition)
+
+ DynamicDataSourceCreatorAutoConfiguration.HikariDataSourceCreatorConfiguration matched:
+ - @ConditionalOnClass found required class 'com.zaxxer.hikari.HikariDataSource' (OnClassCondition)
+
+ EmbeddedWebServerFactoryCustomizerAutoConfiguration matched:
+ - @ConditionalOnWebApplication (required) found 'session' scope (OnWebApplicationCondition)
+
+ EmbeddedWebServerFactoryCustomizerAutoConfiguration.TomcatWebServerFactoryCustomizerConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.apache.catalina.startup.Tomcat', 'org.apache.coyote.UpgradeProtocol' (OnClassCondition)
+
+ EndpointAutoConfiguration#endpointCachingOperationInvokerAdvisor matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.invoker.cache.CachingOperationInvokerAdvisor; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ EndpointAutoConfiguration#endpointOperationParameterMapper matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.invoke.ParameterValueMapper; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ErrorMvcAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.servlet.Servlet', 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ ErrorMvcAutoConfiguration#basicErrorController matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.web.servlet.error.ErrorController; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ ErrorMvcAutoConfiguration#errorAttributes matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.web.servlet.error.ErrorAttributes; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ ErrorMvcAutoConfiguration.DefaultErrorViewResolverConfiguration#conventionErrorViewResolver matched:
+ - @ConditionalOnBean (types: org.springframework.web.servlet.DispatcherServlet; SearchStrategy: all) found bean 'dispatcherServlet'; @ConditionalOnMissingBean (types: org.springframework.boot.autoconfigure.web.servlet.error.ErrorViewResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ErrorMvcAutoConfiguration.WhitelabelErrorViewConfiguration matched:
+ - @ConditionalOnProperty (server.error.whitelabel.enabled) matched (OnPropertyCondition)
+ - ErrorTemplate Missing did not find error template view (ErrorMvcAutoConfiguration.ErrorTemplateMissingCondition)
+
+ ErrorMvcAutoConfiguration.WhitelabelErrorViewConfiguration#beanNameViewResolver matched:
+ - @ConditionalOnMissingBean (types: org.springframework.web.servlet.view.BeanNameViewResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ErrorMvcAutoConfiguration.WhitelabelErrorViewConfiguration#defaultErrorView matched:
+ - @ConditionalOnMissingBean (names: error; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ FilterConfig#xssFilterRegistration matched:
+ - @ConditionalOnProperty (xss.enabled=true) matched (OnPropertyCondition)
+
+ HealthContributorAutoConfiguration#pingHealthContributor matched:
+ - @ConditionalOnEnabledHealthIndicator management.health.defaults.enabled is considered true (OnEnabledHealthIndicatorCondition)
+
+ HealthEndpointAutoConfiguration matched:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.health.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint marked as exposed by a 'management.endpoints.web.exposure' property (OnAvailableEndpointCondition)
+
+ HealthEndpointConfiguration#healthContributorRegistry matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.HealthContributorRegistry; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HealthEndpointConfiguration#healthEndpoint matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.HealthEndpoint; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HealthEndpointConfiguration#healthEndpointGroups matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.HealthEndpointGroups; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HealthEndpointConfiguration#healthHttpCodeStatusMapper matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.HttpCodeStatusMapper; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HealthEndpointConfiguration#healthStatusAggregator matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.StatusAggregator; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HealthEndpointWebExtensionConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnBean (types: org.springframework.boot.actuate.health.HealthEndpoint; SearchStrategy: all) found bean 'healthEndpoint' (OnBeanCondition)
+
+ HealthEndpointWebExtensionConfiguration#healthEndpointWebExtension matched:
+ - @ConditionalOnBean (types: org.springframework.boot.actuate.health.HealthEndpoint; SearchStrategy: all) found bean 'healthEndpoint'; @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.HealthEndpointWebExtension; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HttpClientMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.instrument.MeterRegistry' (OnClassCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry' (OnBeanCondition)
+
+ HttpEncodingAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.filter.CharacterEncodingFilter' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (server.servlet.encoding.enabled) matched (OnPropertyCondition)
+
+ HttpEncodingAutoConfiguration#characterEncodingFilter matched:
+ - @ConditionalOnMissingBean (types: org.springframework.web.filter.CharacterEncodingFilter; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HttpMessageConvertersAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.http.converter.HttpMessageConverter' (OnClassCondition)
+ - NoneNestedConditions 0 matched 1 did not; NestedCondition on HttpMessageConvertersAutoConfiguration.NotReactiveWebApplicationCondition.ReactiveWebApplication did not find reactive web application classes (HttpMessageConvertersAutoConfiguration.NotReactiveWebApplicationCondition)
+
+ HttpMessageConvertersAutoConfiguration#messageConverters matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.autoconfigure.http.HttpMessageConverters; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ HttpMessageConvertersAutoConfiguration.StringHttpMessageConverterConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.http.converter.StringHttpMessageConverter' (OnClassCondition)
+
+ HttpMessageConvertersAutoConfiguration.StringHttpMessageConverterConfiguration#stringHttpMessageConverter matched:
+ - @ConditionalOnMissingBean (types: org.springframework.http.converter.StringHttpMessageConverter; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ InfoContributorAutoConfiguration#envInfoContributor matched:
+ - @ConditionalOnEnabledInfoContributor management.info.defaults.enabled is considered true (OnEnabledInfoContributorCondition)
+
+ JCacheCacheConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.cache.Caching', 'org.springframework.cache.jcache.JCacheCacheManager' (OnClassCondition)
+ - Cache org.springframework.boot.autoconfigure.cache.JCacheCacheConfiguration automatic cache type (CacheCondition)
+ - AnyNestedCondition 1 matched 1 did not; NestedCondition on JCacheCacheConfiguration.JCacheAvailableCondition.CustomJCacheCacheManager @ConditionalOnSingleCandidate (types: javax.cache.CacheManager; SearchStrategy: all) did not find any beans; NestedCondition on JCacheCacheConfiguration.JCacheAvailableCondition.JCacheProvider JCache found single JSR-107 provider (JCacheCacheConfiguration.JCacheAvailableCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.cache.CacheManager; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JCacheCacheConfiguration#jCacheCacheManager matched:
+ - @ConditionalOnMissingBean (types: javax.cache.CacheManager; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JacksonAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'com.fasterxml.jackson.databind.ObjectMapper' (OnClassCondition)
+
+ JacksonAutoConfiguration.Jackson2ObjectMapperBuilderCustomizerConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.http.converter.json.Jackson2ObjectMapperBuilder' (OnClassCondition)
+
+ JacksonAutoConfiguration.JacksonObjectMapperBuilderConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.http.converter.json.Jackson2ObjectMapperBuilder' (OnClassCondition)
+
+ JacksonAutoConfiguration.JacksonObjectMapperBuilderConfiguration#jacksonObjectMapperBuilder matched:
+ - @ConditionalOnMissingBean (types: org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JacksonAutoConfiguration.JacksonObjectMapperConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.http.converter.json.Jackson2ObjectMapperBuilder' (OnClassCondition)
+
+ JacksonAutoConfiguration.JacksonObjectMapperConfiguration#jacksonObjectMapper matched:
+ - @ConditionalOnMissingBean (types: com.fasterxml.jackson.databind.ObjectMapper; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JacksonAutoConfiguration.ParameterNamesModuleConfiguration matched:
+ - @ConditionalOnClass found required class 'com.fasterxml.jackson.module.paramnames.ParameterNamesModule' (OnClassCondition)
+
+ JacksonAutoConfiguration.ParameterNamesModuleConfiguration#parameterNamesModule matched:
+ - @ConditionalOnMissingBean (types: com.fasterxml.jackson.module.paramnames.ParameterNamesModule; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JacksonHttpMessageConvertersConfiguration.MappingJackson2HttpMessageConverterConfiguration matched:
+ - @ConditionalOnClass found required class 'com.fasterxml.jackson.databind.ObjectMapper' (OnClassCondition)
+ - @ConditionalOnProperty (spring.mvc.converters.preferred-json-mapper=jackson) matched (OnPropertyCondition)
+ - @ConditionalOnBean (types: com.fasterxml.jackson.databind.ObjectMapper; SearchStrategy: all) found bean 'jacksonObjectMapper' (OnBeanCondition)
+
+ JacksonHttpMessageConvertersConfiguration.MappingJackson2HttpMessageConverterConfiguration#mappingJackson2HttpMessageConverter matched:
+ - @ConditionalOnMissingBean (types: org.springframework.http.converter.json.MappingJackson2HttpMessageConverter ignored: org.springframework.hateoas.server.mvc.TypeConstrainedMappingJackson2HttpMessageConverter,org.springframework.data.rest.webmvc.alps.AlpsJsonHttpMessageConverter; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JdbcTemplateAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.sql.DataSource', 'org.springframework.jdbc.core.JdbcTemplate' (OnClassCondition)
+ - @ConditionalOnSingleCandidate (types: javax.sql.DataSource; SearchStrategy: all) found a single primary bean 'dynamicDataSource' from beans 'masterDataSource', 'dynamicDataSource' (OnBeanCondition)
+
+ JdbcTemplateConfiguration matched:
+ - @ConditionalOnMissingBean (types: org.springframework.jdbc.core.JdbcOperations; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JvmMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.instrument.MeterRegistry' (OnClassCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry' (OnBeanCondition)
+
+ JvmMetricsAutoConfiguration#classLoaderMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.jvm.ClassLoaderMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JvmMetricsAutoConfiguration#jvmGcMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.jvm.JvmGcMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JvmMetricsAutoConfiguration#jvmMemoryMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.jvm.JvmMemoryMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ JvmMetricsAutoConfiguration#jvmThreadMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.jvm.JvmThreadMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ LettuceConnectionConfiguration matched:
+ - @ConditionalOnClass found required class 'io.lettuce.core.RedisClient' (OnClassCondition)
+ - @ConditionalOnProperty (spring.redis.client-type=lettuce) matched (OnPropertyCondition)
+
+ LettuceConnectionConfiguration#lettuceClientResources matched:
+ - @ConditionalOnMissingBean (types: io.lettuce.core.resource.ClientResources; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ LifecycleAutoConfiguration#defaultLifecycleProcessor matched:
+ - @ConditionalOnMissingBean (names: lifecycleProcessor; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ LogbackMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'io.micrometer.core.instrument.MeterRegistry', 'ch.qos.logback.classic.LoggerContext', 'org.slf4j.LoggerFactory' (OnClassCondition)
+ - LogbackLoggingCondition ILoggerFactory is a Logback LoggerContext (LogbackMetricsAutoConfiguration.LogbackLoggingCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry' (OnBeanCondition)
+
+ LogbackMetricsAutoConfiguration#logbackMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.logging.LogbackMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ManagementContextAutoConfiguration.SameManagementContextConfiguration matched:
+ - Management Port actual port type (SAME) matched required type (OnManagementPortCondition)
+
+ MessageSourceAutoConfiguration matched:
+ - ResourceBundle found bundle URL [jar:file:/media/tao_iot/haiwei-admin.jar!/BOOT-INF/classes!/i18n/messages.properties] (MessageSourceAutoConfiguration.ResourceBundleCondition)
+ - @ConditionalOnMissingBean (names: messageSource; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ MetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.annotation.Timed' (OnClassCondition)
+
+ MetricsAutoConfiguration#micrometerClock matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.Clock; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ MultipartAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.servlet.Servlet', 'org.springframework.web.multipart.support.StandardServletMultipartResolver', 'javax.servlet.MultipartConfigElement' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (spring.servlet.multipart.enabled) matched (OnPropertyCondition)
+
+ MultipartAutoConfiguration#multipartConfigElement matched:
+ - @ConditionalOnMissingBean (types: javax.servlet.MultipartConfigElement,org.springframework.web.multipart.commons.CommonsMultipartResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ MultipartAutoConfiguration#multipartResolver matched:
+ - @ConditionalOnMissingBean (types: org.springframework.web.multipart.MultipartResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ MybatisAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.apache.ibatis.session.SqlSessionFactory', 'org.mybatis.spring.SqlSessionFactoryBean' (OnClassCondition)
+ - @ConditionalOnSingleCandidate (types: javax.sql.DataSource; SearchStrategy: all) found a single primary bean 'dynamicDataSource' from beans 'masterDataSource', 'dynamicDataSource' (OnBeanCondition)
+
+ MybatisAutoConfiguration#sqlSessionTemplate matched:
+ - @ConditionalOnMissingBean (types: org.mybatis.spring.SqlSessionTemplate; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ MybatisLanguageDriverAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.apache.ibatis.scripting.LanguageDriver' (OnClassCondition)
+
+ NamedParameterJdbcTemplateConfiguration matched:
+ - @ConditionalOnSingleCandidate (types: org.springframework.jdbc.core.JdbcTemplate; SearchStrategy: all) found a single bean 'jdbcTemplate'; @ConditionalOnMissingBean (types: org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ NettyAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'io.netty.util.NettyRuntime' (OnClassCondition)
+
+ OpenApiAutoConfiguration matched:
+ - @ConditionalOnProperty (springfox.documentation.enabled=true) matched (OnPropertyCondition)
+
+ OpenApiControllerWebMvc matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ OpenApiWebMvcConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ PageHelperAutoConfiguration matched:
+ - @ConditionalOnBean (types: org.apache.ibatis.session.SqlSessionFactory; SearchStrategy: all) found bean 'sqlSessionFactory' (OnBeanCondition)
+
+ PersistenceExceptionTranslationAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor' (OnClassCondition)
+
+ PersistenceExceptionTranslationAutoConfiguration#persistenceExceptionTranslationPostProcessor matched:
+ - @ConditionalOnProperty (spring.dao.exceptiontranslation.enabled) matched (OnPropertyCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ PropertyPlaceholderAutoConfiguration#propertySourcesPlaceholderConfigurer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.context.support.PropertySourcesPlaceholderConfigurer; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ QuartzAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.quartz.Scheduler', 'org.springframework.scheduling.quartz.SchedulerFactoryBean', 'org.springframework.transaction.PlatformTransactionManager' (OnClassCondition)
+
+ QuartzAutoConfiguration#quartzScheduler matched:
+ - @ConditionalOnMissingBean (types: org.springframework.scheduling.quartz.SchedulerFactoryBean; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ReactiveHealthEndpointConfiguration matched:
+ - @ConditionalOnClass found required class 'reactor.core.publisher.Flux' (OnClassCondition)
+ - @ConditionalOnBean (types: org.springframework.boot.actuate.health.HealthEndpoint; SearchStrategy: all) found bean 'healthEndpoint' (OnBeanCondition)
+
+ ReactiveHealthEndpointConfiguration#reactiveHealthContributorRegistry matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.health.ReactiveHealthContributorRegistry; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedisAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.data.redis.core.RedisOperations' (OnClassCondition)
+
+ RedisHealthContributorAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.data.redis.connection.RedisConnectionFactory' (OnClassCondition)
+ - @ConditionalOnEnabledHealthIndicator management.health.defaults.enabled is considered true (OnEnabledHealthIndicatorCondition)
+ - @ConditionalOnBean (types: org.springframework.data.redis.connection.RedisConnectionFactory; SearchStrategy: all) found bean 'redissonConnectionFactory' (OnBeanCondition)
+
+ RedisReactiveAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.data.redis.connection.ReactiveRedisConnectionFactory', 'org.springframework.data.redis.core.ReactiveRedisTemplate', 'reactor.core.publisher.Flux' (OnClassCondition)
+
+ RedisReactiveAutoConfiguration#reactiveRedisTemplate matched:
+ - @ConditionalOnBean (types: org.springframework.data.redis.connection.ReactiveRedisConnectionFactory; SearchStrategy: all) found bean 'redissonConnectionFactory'; @ConditionalOnMissingBean (names: reactiveRedisTemplate; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedisReactiveAutoConfiguration#reactiveStringRedisTemplate matched:
+ - @ConditionalOnBean (types: org.springframework.data.redis.connection.ReactiveRedisConnectionFactory; SearchStrategy: all) found bean 'redissonConnectionFactory'; @ConditionalOnMissingBean (names: reactiveStringRedisTemplate; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedisReactiveHealthContributorAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.data.redis.connection.ReactiveRedisConnectionFactory', 'reactor.core.publisher.Flux' (OnClassCondition)
+ - @ConditionalOnEnabledHealthIndicator management.health.defaults.enabled is considered true (OnEnabledHealthIndicatorCondition)
+ - @ConditionalOnBean (types: org.springframework.data.redis.connection.ReactiveRedisConnectionFactory; SearchStrategy: all) found bean 'redissonConnectionFactory' (OnBeanCondition)
+
+ RedisReactiveHealthContributorAutoConfiguration#redisHealthContributor matched:
+ - @ConditionalOnMissingBean (names: redisHealthIndicator,redisHealthContributor; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedisRepositoriesAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.data.redis.repository.configuration.EnableRedisRepositories' (OnClassCondition)
+ - @ConditionalOnProperty (spring.data.redis.repositories.enabled=true) matched (OnPropertyCondition)
+ - @ConditionalOnBean (types: org.springframework.data.redis.connection.RedisConnectionFactory; SearchStrategy: all) found bean 'redissonConnectionFactory'; @ConditionalOnMissingBean (types: org.springframework.data.redis.repository.support.RedisRepositoryFactoryBean; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedissonAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.redisson.Redisson', 'org.springframework.data.redis.core.RedisOperations' (OnClassCondition)
+
+ RedissonAutoConfiguration#redissonConnectionFactory matched:
+ - @ConditionalOnMissingBean (types: org.springframework.data.redis.connection.RedisConnectionFactory; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedissonAutoConfiguration#redissonReactive matched:
+ - @ConditionalOnMissingBean (types: org.redisson.api.RedissonReactiveClient; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedissonAutoConfiguration#redissonRxJava matched:
+ - @ConditionalOnMissingBean (types: org.redisson.api.RedissonRxClient; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedissonAutoConfiguration#stringRedisTemplate matched:
+ - @ConditionalOnMissingBean (types: org.springframework.data.redis.core.StringRedisTemplate; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RedissonCacheStatisticsAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.boot.actuate.metrics.cache.CacheMeterBinderProvider', 'org.redisson.spring.cache.RedissonCache' (OnClassCondition)
+ - @ConditionalOnBean (types: org.springframework.cache.CacheManager; SearchStrategy: all) found bean 'cacheManager' (OnBeanCondition)
+
+ RepositoryMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.data.repository.Repository' (OnClassCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry' (OnBeanCondition)
+
+ RepositoryMetricsAutoConfiguration#metricsRepositoryMethodInvocationListener matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.metrics.data.MetricsRepositoryMethodInvocationListener; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RepositoryMetricsAutoConfiguration#repositoryTagsProvider matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.metrics.data.RepositoryTagsProvider; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RestTemplateAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.client.RestTemplate' (OnClassCondition)
+ - NoneNestedConditions 0 matched 1 did not; NestedCondition on RestTemplateAutoConfiguration.NotReactiveWebApplicationCondition.ReactiveWebApplication did not find reactive web application classes (RestTemplateAutoConfiguration.NotReactiveWebApplicationCondition)
+
+ RestTemplateAutoConfiguration#restTemplateBuilder matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.web.client.RestTemplateBuilder; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RestTemplateAutoConfiguration#restTemplateBuilderConfigurer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.autoconfigure.web.client.RestTemplateBuilderConfigurer; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ RestTemplateMetricsConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.client.RestTemplate' (OnClassCondition)
+ - @ConditionalOnBean (types: org.springframework.boot.web.client.RestTemplateBuilder; SearchStrategy: all) found bean 'restTemplateBuilder' (OnBeanCondition)
+
+ RestTemplateMetricsConfiguration#restTemplateExchangeTagsProvider matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.metrics.web.client.RestTemplateExchangeTagsProvider; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SecurityAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.security.authentication.DefaultAuthenticationEventPublisher' (OnClassCondition)
+
+ SecurityAutoConfiguration#authenticationEventPublisher matched:
+ - @ConditionalOnMissingBean (types: org.springframework.security.authentication.AuthenticationEventPublisher; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SecurityFilterAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.security.web.context.AbstractSecurityWebApplicationInitializer', 'org.springframework.security.config.http.SessionCreationPolicy' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ SecurityFilterAutoConfiguration#securityFilterChainRegistration matched:
+ - @ConditionalOnBean (names: springSecurityFilterChain; SearchStrategy: all) found bean 'springSecurityFilterChain' (OnBeanCondition)
+
+ SecurityRequestMatchersManagementContextConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.security.web.util.matcher.RequestMatcher' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ SecurityRequestMatchersManagementContextConfiguration.MvcRequestMatcherConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - @ConditionalOnBean (types: org.springframework.boot.autoconfigure.web.servlet.DispatcherServletPath; SearchStrategy: all) found bean 'dispatcherServletRegistration' (OnBeanCondition)
+
+ SecurityRequestMatchersManagementContextConfiguration.MvcRequestMatcherConfiguration#requestMatcherProvider matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.boot.autoconfigure.security.servlet.RequestMatcherProvider; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ServletEndpointManagementContextConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ ServletEndpointManagementContextConfiguration.WebMvcServletEndpointManagementContextConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+
+ ServletManagementContextAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'javax.servlet.Servlet' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ ServletWebServerFactoryAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'javax.servlet.ServletRequest' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ ServletWebServerFactoryAutoConfiguration#tomcatServletWebServerFactoryCustomizer matched:
+ - @ConditionalOnClass found required class 'org.apache.catalina.startup.Tomcat' (OnClassCondition)
+
+ ServletWebServerFactoryConfiguration.EmbeddedTomcat matched:
+ - @ConditionalOnClass found required classes 'javax.servlet.Servlet', 'org.apache.catalina.startup.Tomcat', 'org.apache.coyote.UpgradeProtocol' (OnClassCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.boot.web.servlet.server.ServletWebServerFactory; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ SimpleMetricsExportAutoConfiguration matched:
+ - @ConditionalOnEnabledMetricsExport management.metrics.export.defaults.enabled is considered true (OnMetricsExportEnabledCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.Clock; SearchStrategy: all) found bean 'micrometerClock'; @ConditionalOnMissingBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SimpleMetricsExportAutoConfiguration#simpleConfig matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.simple.SimpleConfig; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SpringDataWebAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.springframework.data.web.PageableHandlerMethodArgumentResolver', 'org.springframework.web.servlet.config.annotation.WebMvcConfigurer' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.data.web.PageableHandlerMethodArgumentResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SpringDataWebAutoConfiguration#pageableCustomizer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.data.web.config.PageableHandlerMethodArgumentResolverCustomizer; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SpringDataWebAutoConfiguration#sortCustomizer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.data.web.config.SortHandlerMethodArgumentResolverCustomizer; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SpringfoxWebMvcConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ SqlInitializationAutoConfiguration matched:
+ - @ConditionalOnProperty (spring.sql.init.enabled) matched (OnPropertyCondition)
+ - NoneNestedConditions 0 matched 1 did not; NestedCondition on SqlInitializationAutoConfiguration.SqlInitializationModeCondition.ModeIsNever @ConditionalOnProperty (spring.sql.init.mode=never) did not find property 'mode' (SqlInitializationAutoConfiguration.SqlInitializationModeCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.boot.sql.init.AbstractScriptDatabaseInitializer; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ Swagger2ControllerWebMvc matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ Swagger2WebMvcConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ SwaggerUiWebMvcConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (springfox.documentation.swagger-ui.enabled=true) matched (OnPropertyCondition)
+
+ SystemMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.instrument.MeterRegistry' (OnClassCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry' (OnBeanCondition)
+
+ SystemMetricsAutoConfiguration#fileDescriptorMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.system.FileDescriptorMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SystemMetricsAutoConfiguration#processorMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.system.ProcessorMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ SystemMetricsAutoConfiguration#uptimeMetrics matched:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.system.UptimeMetrics; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TaskExecutionAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor' (OnClassCondition)
+
+ TaskExecutionAutoConfiguration#taskExecutorBuilder matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.task.TaskExecutorBuilder; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TaskSchedulingAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler' (OnClassCondition)
+
+ TaskSchedulingAutoConfiguration#taskSchedulerBuilder matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.task.TaskSchedulerBuilder; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TomcatMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'io.micrometer.core.instrument.binder.tomcat.TomcatMetrics', 'org.apache.catalina.Manager' (OnClassCondition)
+ - @ConditionalOnWebApplication (required) found 'session' scope (OnWebApplicationCondition)
+
+ TomcatMetricsAutoConfiguration#tomcatMetricsBinder matched:
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry'; @ConditionalOnMissingBean (types: io.micrometer.core.instrument.binder.tomcat.TomcatMetrics,org.springframework.boot.actuate.metrics.web.tomcat.TomcatMetricsBinder; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TransactionAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.transaction.PlatformTransactionManager' (OnClassCondition)
+
+ TransactionAutoConfiguration#platformTransactionManagerCustomizers matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TransactionAutoConfiguration.EnableTransactionManagementConfiguration matched:
+ - @ConditionalOnBean (types: org.springframework.transaction.TransactionManager; SearchStrategy: all) found bean 'transactionManager'; @ConditionalOnMissingBean (types: org.springframework.transaction.annotation.AbstractTransactionManagementConfiguration; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TransactionAutoConfiguration.EnableTransactionManagementConfiguration.CglibAutoProxyConfiguration matched:
+ - @ConditionalOnProperty (spring.aop.proxy-target-class=true) matched (OnPropertyCondition)
+
+ TransactionAutoConfiguration.TransactionTemplateConfiguration matched:
+ - @ConditionalOnSingleCandidate (types: org.springframework.transaction.PlatformTransactionManager; SearchStrategy: all) found a single bean 'transactionManager' (OnBeanCondition)
+
+ TransactionAutoConfiguration.TransactionTemplateConfiguration#transactionTemplate matched:
+ - @ConditionalOnMissingBean (types: org.springframework.transaction.support.TransactionOperations; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ValidationAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'javax.validation.executable.ExecutableValidator' (OnClassCondition)
+ - @ConditionalOnResource found location classpath:META-INF/services/javax.validation.spi.ValidationProvider (OnResourceCondition)
+
+ ValidationAutoConfiguration#defaultValidator matched:
+ - @ConditionalOnMissingBean (types: javax.validation.Validator; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ ValidationAutoConfiguration#methodValidationPostProcessor matched:
+ - @ConditionalOnMissingBean (types: org.springframework.validation.beanvalidation.MethodValidationPostProcessor; SearchStrategy: current) did not find any beans (OnBeanCondition)
+
+ WebEndpointAutoConfiguration matched:
+ - @ConditionalOnWebApplication (required) found 'session' scope (OnWebApplicationCondition)
+
+ WebEndpointAutoConfiguration#controllerEndpointDiscoverer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.annotation.ControllerEndpointsSupplier; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebEndpointAutoConfiguration#endpointMediaTypes matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.EndpointMediaTypes; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebEndpointAutoConfiguration#pathMappedEndpoints matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.PathMappedEndpoints; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebEndpointAutoConfiguration#webEndpointDiscoverer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.WebEndpointsSupplier; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebEndpointAutoConfiguration.WebEndpointServletConfiguration matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ WebEndpointAutoConfiguration.WebEndpointServletConfiguration#servletEndpointDiscoverer matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.annotation.ServletEndpointsSupplier; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.servlet.Servlet', 'org.springframework.web.servlet.DispatcherServlet', 'org.springframework.web.servlet.config.annotation.WebMvcConfigurer' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration#formContentFilter matched:
+ - @ConditionalOnProperty (spring.mvc.formcontent.filter.enabled) matched (OnPropertyCondition)
+ - @ConditionalOnMissingBean (types: org.springframework.web.filter.FormContentFilter; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration.EnableWebMvcConfiguration#flashMapManager matched:
+ - @ConditionalOnMissingBean (names: flashMapManager; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration.EnableWebMvcConfiguration#themeResolver matched:
+ - @ConditionalOnMissingBean (names: themeResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration.WebMvcAutoConfigurationAdapter#defaultViewResolver matched:
+ - @ConditionalOnMissingBean (types: org.springframework.web.servlet.view.InternalResourceViewResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration.WebMvcAutoConfigurationAdapter#requestContextFilter matched:
+ - @ConditionalOnMissingBean (types: org.springframework.web.context.request.RequestContextListener,org.springframework.web.filter.RequestContextFilter; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcAutoConfiguration.WebMvcAutoConfigurationAdapter#viewResolver matched:
+ - @ConditionalOnBean (types: org.springframework.web.servlet.ViewResolver; SearchStrategy: all) found beans 'defaultViewResolver', 'beanNameViewResolver', 'mvcViewResolver'; @ConditionalOnMissingBean (names: viewResolver types: org.springframework.web.servlet.view.ContentNegotiatingViewResolver; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcEndpointManagementContextConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnBean (types: org.springframework.web.servlet.DispatcherServlet,org.springframework.boot.actuate.endpoint.web.WebEndpointsSupplier; SearchStrategy: all) found beans 'webEndpointDiscoverer', 'dispatcherServlet' (OnBeanCondition)
+
+ WebMvcEndpointManagementContextConfiguration#controllerEndpointHandlerMapping matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.servlet.ControllerEndpointHandlerMapping; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcEndpointManagementContextConfiguration#webEndpointServletHandlerMapping matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.endpoint.web.servlet.WebMvcEndpointHandlerMapping; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcMetricsAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found bean 'simpleMeterRegistry' (OnBeanCondition)
+
+ WebMvcMetricsAutoConfiguration#webMvcTagsProvider matched:
+ - @ConditionalOnMissingBean (types: org.springframework.boot.actuate.metrics.web.servlet.WebMvcTagsProvider; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebMvcRequestHandlerProvider matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ WebSecurityEnablerConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.security.config.annotation.web.configuration.EnableWebSecurity' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnMissingBean (names: springSecurityFilterChain; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ WebSocketMessagingAutoConfiguration matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ WebSocketServletAutoConfiguration matched:
+ - @ConditionalOnClass found required classes 'javax.servlet.Servlet', 'javax.websocket.server.ServerContainer' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+
+ WebSocketServletAutoConfiguration.TomcatWebSocketConfiguration matched:
+ - @ConditionalOnClass found required classes 'org.apache.catalina.startup.Tomcat', 'org.apache.tomcat.websocket.server.WsSci' (OnClassCondition)
+
+ WebSocketServletAutoConfiguration.TomcatWebSocketConfiguration#websocketServletWebServerCustomizer matched:
+ - @ConditionalOnMissingBean (names: websocketServletWebServerCustomizer; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+
+Negative matches:
+-----------------
+
+ ActiveMQAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.jms.ConnectionFactory' (OnClassCondition)
+
+ AopAutoConfiguration.AspectJAutoProxyingConfiguration.JdkDynamicAutoProxyConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.aop.proxy-target-class=false) did not find property 'proxy-target-class' (OnPropertyCondition)
+
+ AopAutoConfiguration.ClassProxyingConfiguration:
+ Did not match:
+ - @ConditionalOnMissingClass found unwanted class 'org.aspectj.weaver.Advice' (OnClassCondition)
+
+ AppOpticsMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.appoptics.AppOpticsMeterRegistry' (OnClassCondition)
+
+ ArtemisAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.jms.ConnectionFactory' (OnClassCondition)
+
+ AtlasMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.atlas.AtlasMeterRegistry' (OnClassCondition)
+
+ AuditAutoConfiguration:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.boot.actuate.audit.AuditEventRepository; SearchStrategy: all) did not find any beans of type org.springframework.boot.actuate.audit.AuditEventRepository (OnBeanCondition)
+ Matched:
+ - @ConditionalOnProperty (management.auditevents.enabled) matched (OnPropertyCondition)
+
+ AuditEventsEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.auditevents.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ AvailabilityHealthContributorAutoConfiguration#livenessStateHealthIndicator:
+ Did not match:
+ - @ConditionalOnProperty (management.health.livenessstate.enabled=true) did not find property 'enabled' (OnPropertyCondition)
+
+ AvailabilityHealthContributorAutoConfiguration#readinessStateHealthIndicator:
+ Did not match:
+ - @ConditionalOnProperty (management.health.readinessstate.enabled=true) did not find property 'enabled' (OnPropertyCondition)
+
+ AvailabilityProbesAutoConfiguration:
+ Did not match:
+ - Probes availability not running on a supported cloud platform (AvailabilityProbesAutoConfiguration.ProbesCondition)
+
+ BatchAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.batch.core.launch.JobLauncher' (OnClassCondition)
+
+ BeansEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.beans.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ CacheAutoConfiguration.CacheManagerEntityManagerFactoryDependsOnPostProcessor:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean' (OnClassCondition)
+
+ CacheMeterBinderProvidersConfiguration.CaffeineCacheMeterBinderProviderConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.github.benmanes.caffeine.cache.Cache' (OnClassCondition)
+
+ CacheMeterBinderProvidersConfiguration.EhCache2CacheMeterBinderProviderConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'net.sf.ehcache.Ehcache' (OnClassCondition)
+
+ CacheMeterBinderProvidersConfiguration.HazelcastCacheMeterBinderProviderConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'com.hazelcast.spring.cache.HazelcastCache', 'com.hazelcast.core.Hazelcast' (OnClassCondition)
+
+ CachesEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.caches.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.cache.CacheManager' (OnClassCondition)
+
+ CaffeineCacheConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.github.benmanes.caffeine.cache.Caffeine' (OnClassCondition)
+
+ CassandraAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.datastax.oss.driver.api.core.CqlSession' (OnClassCondition)
+
+ CassandraDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.datastax.oss.driver.api.core.CqlSession' (OnClassCondition)
+
+ CassandraHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.datastax.oss.driver.api.core.CqlSession' (OnClassCondition)
+
+ CassandraReactiveDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.datastax.oss.driver.api.core.CqlSession' (OnClassCondition)
+
+ CassandraReactiveHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.datastax.oss.driver.api.core.CqlSession' (OnClassCondition)
+
+ CassandraReactiveRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.cassandra.ReactiveSession' (OnClassCondition)
+
+ CassandraRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.datastax.oss.driver.api.core.CqlSession' (OnClassCondition)
+
+ ClientHttpConnectorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.function.client.WebClient' (OnClassCondition)
+
+ CloudFoundryActuatorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnCloudPlatform did not find CLOUD_FOUNDRY (OnCloudPlatformCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.web.servlet.DispatcherServlet' (OnClassCondition)
+ - found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (management.cloudfoundry.enabled) matched (OnPropertyCondition)
+
+ CodecsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.function.client.WebClient' (OnClassCondition)
+
+ CompositeMeterRegistryConfiguration:
+ Did not match:
+ - NoneNestedConditions 1 matched 1 did not; NestedCondition on CompositeMeterRegistryConfiguration.MultipleNonPrimaryMeterRegistriesCondition.SingleInjectableMeterRegistry @ConditionalOnSingleCandidate (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found a single bean 'simpleMeterRegistry'; NestedCondition on CompositeMeterRegistryConfiguration.MultipleNonPrimaryMeterRegistriesCondition.NoMeterRegistryCondition @ConditionalOnMissingBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found beans of type 'io.micrometer.core.instrument.MeterRegistry' simpleMeterRegistry (CompositeMeterRegistryConfiguration.MultipleNonPrimaryMeterRegistriesCondition)
+
+ ConditionsReportEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.conditions.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ ConfigurationPropertiesReportEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.configprops.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ ConnectionFactoryHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.r2dbc.spi.ConnectionFactory' (OnClassCondition)
+
+ ConnectionPoolMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.r2dbc.pool.ConnectionPool' (OnClassCondition)
+
+ CouchbaseAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Cluster' (OnClassCondition)
+
+ CouchbaseCacheConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Cluster' (OnClassCondition)
+
+ CouchbaseDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Bucket' (OnClassCondition)
+
+ CouchbaseHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Cluster' (OnClassCondition)
+
+ CouchbaseReactiveDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Cluster' (OnClassCondition)
+
+ CouchbaseReactiveHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Cluster' (OnClassCondition)
+
+ CouchbaseReactiveRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Cluster' (OnClassCondition)
+
+ CouchbaseRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.couchbase.client.java.Bucket' (OnClassCondition)
+
+ DataSourcePoolMetricsAutoConfiguration.DataSourcePoolMetadataMetricsConfiguration:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.boot.jdbc.metadata.DataSourcePoolMetadataProvider; SearchStrategy: all) did not find any beans of type org.springframework.boot.jdbc.metadata.DataSourcePoolMetadataProvider (OnBeanCondition)
+
+ DatadogMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.datadog.DatadogMeterRegistry' (OnClassCondition)
+
+ DispatcherServletAutoConfiguration.DispatcherServletConfiguration#multipartResolver:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.web.multipart.MultipartResolver; SearchStrategy: all) did not find any beans of type org.springframework.web.multipart.MultipartResolver (OnBeanCondition)
+
+ DruidConfig#oneDataSource:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.one.enabled=true) found different value in property 'enabled' (OnPropertyCondition)
+
+ DruidConfig#slaveDataSource:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.slave.enabled=true) found different value in property 'enabled' (OnPropertyCondition)
+
+ DruidConfig#twoDataSource:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.two.enabled=true) found different value in property 'enabled' (OnPropertyCondition)
+
+ DruidDataSourceAutoConfigure#dataSource:
+ Did not match:
+ - @ConditionalOnMissingBean (types: javax.sql.DataSource; SearchStrategy: all) found beans of type 'javax.sql.DataSource' masterDataSource, dynamicDataSource (OnBeanCondition)
+
+ DruidFilterConfiguration#commonsLogFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.commons-log.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#configFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.config.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#encodingConvertFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.encoding.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#log4j2Filter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.log4j2.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#log4jFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.log4j.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#slf4jLogFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.slf4j.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#wallConfig:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.wall.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidFilterConfiguration#wallFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.filter.wall.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ DruidSpringAopConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.druid.aop-patterns) did not find property 'spring.datasource.druid.aop-patterns' (OnPropertyCondition)
+
+ DynamicDataSourceAutoConfiguration#dataSource:
+ Did not match:
+ - @ConditionalOnMissingBean (types: javax.sql.DataSource; SearchStrategy: all) found beans of type 'javax.sql.DataSource' masterDataSource, dynamicDataSource (OnBeanCondition)
+
+ DynamicDataSourceCreatorAutoConfiguration.BeeCpDataSourceCreatorConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'cn.beecp.BeeDataSource' (OnClassCondition)
+
+ DynamicDataSourceCreatorAutoConfiguration.Dbcp2DataSourceCreatorConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.apache.commons.dbcp2.BasicDataSource' (OnClassCondition)
+
+ DynatraceMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.dynatrace.DynatraceMeterRegistry' (OnClassCondition)
+
+ EhCacheCacheConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'net.sf.ehcache.Cache' (OnClassCondition)
+
+ ElasticMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.elastic.ElasticMeterRegistry' (OnClassCondition)
+
+ ElasticSearchReactiveHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient' (OnClassCondition)
+
+ ElasticSearchRestHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.elasticsearch.client.RestHighLevelClient' (OnClassCondition)
+
+ ElasticsearchDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate' (OnClassCondition)
+
+ ElasticsearchRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.elasticsearch.client.Client' (OnClassCondition)
+
+ ElasticsearchRestClientAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.elasticsearch.client.RestHighLevelClient' (OnClassCondition)
+
+ EmbeddedLdapAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.unboundid.ldap.listener.InMemoryDirectoryServer' (OnClassCondition)
+
+ EmbeddedMongoAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.MongoClientSettings' (OnClassCondition)
+
+ EmbeddedWebServerFactoryCustomizerAutoConfiguration.JettyWebServerFactoryCustomizerConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'org.eclipse.jetty.server.Server', 'org.eclipse.jetty.util.Loader', 'org.eclipse.jetty.webapp.WebAppContext' (OnClassCondition)
+
+ EmbeddedWebServerFactoryCustomizerAutoConfiguration.NettyWebServerFactoryCustomizerConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'reactor.netty.http.server.HttpServer' (OnClassCondition)
+
+ EmbeddedWebServerFactoryCustomizerAutoConfiguration.UndertowWebServerFactoryCustomizerConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'io.undertow.Undertow', 'org.xnio.SslClientAuthMode' (OnClassCondition)
+
+ EnvironmentEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.env.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ ErrorWebFluxAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.config.WebFluxConfigurer' (OnClassCondition)
+
+ FlywayAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.flywaydb.core.Flyway' (OnClassCondition)
+
+ FlywayEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.flywaydb.core.Flyway' (OnClassCondition)
+
+ FreeMarkerAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'freemarker.template.Configuration' (OnClassCondition)
+
+ GangliaMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.ganglia.GangliaMeterRegistry' (OnClassCondition)
+
+ GenericCacheConfiguration:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.cache.Cache; SearchStrategy: all) did not find any beans of type org.springframework.cache.Cache (OnBeanCondition)
+ Matched:
+ - Cache org.springframework.boot.autoconfigure.cache.GenericCacheConfiguration automatic cache type (CacheCondition)
+
+ GraphiteMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.graphite.GraphiteMeterRegistry' (OnClassCondition)
+
+ GroovyTemplateAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'groovy.text.markup.MarkupTemplateEngine' (OnClassCondition)
+
+ GsonAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.google.gson.Gson' (OnClassCondition)
+
+ GsonHttpMessageConvertersConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.google.gson.Gson' (OnClassCondition)
+
+ H2ConsoleAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.h2.server.web.WebServlet' (OnClassCondition)
+
+ HazelcastAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.hazelcast.core.HazelcastInstance' (OnClassCondition)
+
+ HazelcastCacheConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.hazelcast.core.HazelcastInstance' (OnClassCondition)
+
+ HazelcastHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.hazelcast.core.HazelcastInstance' (OnClassCondition)
+
+ HazelcastJCacheCustomizationConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.hazelcast.core.HazelcastInstance' (OnClassCondition)
+
+ HazelcastJpaDependencyAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.hazelcast.core.HazelcastInstance' (OnClassCondition)
+
+ HealthEndpointReactiveWebExtensionConfiguration:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ HeapDumpWebEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.heapdump.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ HibernateJpaAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.persistence.EntityManager' (OnClassCondition)
+
+ HibernateMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.persistence.EntityManagerFactory' (OnClassCondition)
+
+ HttpHandlerAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.DispatcherHandler' (OnClassCondition)
+
+ HttpTraceAutoConfiguration:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.boot.actuate.trace.http.HttpTraceRepository; SearchStrategy: all) did not find any beans of type org.springframework.boot.actuate.trace.http.HttpTraceRepository (OnBeanCondition)
+ Matched:
+ - @ConditionalOnWebApplication (required) found 'session' scope (OnWebApplicationCondition)
+ - @ConditionalOnProperty (management.trace.http.enabled) matched (OnPropertyCondition)
+
+ HttpTraceAutoConfiguration.ReactiveTraceFilterConfiguration:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+ - Ancestor org.springframework.boot.actuate.autoconfigure.trace.http.HttpTraceAutoConfiguration did not match (ConditionEvaluationReport.AncestorsMatchedCondition)
+
+ HttpTraceAutoConfiguration.ServletTraceFilterConfiguration:
+ Did not match:
+ - Ancestor org.springframework.boot.actuate.autoconfigure.trace.http.HttpTraceAutoConfiguration did not match (ConditionEvaluationReport.AncestorsMatchedCondition)
+ Matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ HttpTraceEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.httptrace.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ HumioMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.humio.HumioMeterRegistry' (OnClassCondition)
+
+ HypermediaAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.hateoas.EntityModel' (OnClassCondition)
+
+ InfinispanCacheConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.infinispan.spring.embedded.provider.SpringEmbeddedCacheManager' (OnClassCondition)
+
+ InfluxDbAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.influxdb.InfluxDB' (OnClassCondition)
+
+ InfluxDbHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.influxdb.InfluxDB' (OnClassCondition)
+
+ InfluxMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.influx.InfluxMeterRegistry' (OnClassCondition)
+
+ InfoContributorAutoConfiguration#buildInfoContributor:
+ Did not match:
+ - @ConditionalOnSingleCandidate (types: org.springframework.boot.info.BuildProperties; SearchStrategy: all) did not find any beans (OnBeanCondition)
+ Matched:
+ - @ConditionalOnEnabledInfoContributor management.info.defaults.enabled is considered true (OnEnabledInfoContributorCondition)
+
+ InfoContributorAutoConfiguration#gitInfoContributor:
+ Did not match:
+ - @ConditionalOnSingleCandidate (types: org.springframework.boot.info.GitProperties; SearchStrategy: all) did not find any beans (OnBeanCondition)
+ Matched:
+ - @ConditionalOnEnabledInfoContributor management.info.defaults.enabled is considered true (OnEnabledInfoContributorCondition)
+
+ InfoEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.info.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ IntegrationAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.integration.config.EnableIntegration' (OnClassCondition)
+
+ IntegrationGraphEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.integration.graph.IntegrationGraphServer' (OnClassCondition)
+
+ JacksonHttpMessageConvertersConfiguration.MappingJackson2XmlHttpMessageConverterConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.fasterxml.jackson.dataformat.xml.XmlMapper' (OnClassCondition)
+
+ JdbcRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.jdbc.repository.config.AbstractJdbcConfiguration' (OnClassCondition)
+
+ JedisConnectionConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'redis.clients.jedis.Jedis' (OnClassCondition)
+
+ JerseyAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.glassfish.jersey.server.spring.SpringComponentProvider' (OnClassCondition)
+
+ JerseySameManagementContextConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.glassfish.jersey.server.ResourceConfig' (OnClassCondition)
+
+ JerseyServerMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.jersey2.server.MetricsApplicationEventListener' (OnClassCondition)
+
+ JerseyWebEndpointManagementContextConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.glassfish.jersey.server.ResourceConfig' (OnClassCondition)
+
+ JettyMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.eclipse.jetty.server.Server' (OnClassCondition)
+
+ JmsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.jms.Message' (OnClassCondition)
+
+ JmsHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.jms.ConnectionFactory' (OnClassCondition)
+
+ JmxAutoConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.jmx.enabled=true) did not find property 'enabled' (OnPropertyCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.jmx.export.MBeanExporter' (OnClassCondition)
+
+ JmxEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.jmx.enabled=true) did not find property 'enabled' (OnPropertyCondition)
+
+ JmxMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.jmx.JmxMeterRegistry' (OnClassCondition)
+
+ JndiConnectionFactoryAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.jms.core.JmsTemplate' (OnClassCondition)
+
+ JndiDataSourceAutoConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.datasource.jndi-name) did not find property 'jndi-name' (OnPropertyCondition)
+ Matched:
+ - @ConditionalOnClass found required classes 'javax.sql.DataSource', 'org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType' (OnClassCondition)
+
+ JolokiaEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.jolokia.http.AgentServlet' (OnClassCondition)
+
+ JooqAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.jooq.DSLContext' (OnClassCondition)
+
+ JpaRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.jpa.repository.JpaRepository' (OnClassCondition)
+
+ JsonbAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.json.bind.Jsonb' (OnClassCondition)
+
+ JsonbHttpMessageConvertersConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.json.bind.Jsonb' (OnClassCondition)
+
+ JtaAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.transaction.Transaction' (OnClassCondition)
+
+ KafkaAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.kafka.core.KafkaTemplate' (OnClassCondition)
+
+ KafkaMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.kafka.core.ProducerFactory' (OnClassCondition)
+
+ KairosMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.kairos.KairosMeterRegistry' (OnClassCondition)
+
+ LdapAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.ldap.core.ContextSource' (OnClassCondition)
+
+ LdapHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.ldap.core.LdapOperations' (OnClassCondition)
+
+ LdapRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.ldap.repository.LdapRepository' (OnClassCondition)
+
+ LettuceConnectionConfiguration#redisConnectionFactory:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.data.redis.connection.RedisConnectionFactory; SearchStrategy: all) found beans of type 'org.springframework.data.redis.connection.RedisConnectionFactory' redissonConnectionFactory (OnBeanCondition)
+
+ LiquibaseAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'liquibase.change.DatabaseChange' (OnClassCondition)
+
+ LiquibaseEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'liquibase.integration.spring.SpringLiquibase' (OnClassCondition)
+
+ Log4J2MetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.apache.logging.log4j.core.LoggerContext' (OnClassCondition)
+
+ LogFileWebEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.logfile.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ LoggersEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.loggers.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ MailHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.mail.javamail.JavaMailSenderImpl; SearchStrategy: all) did not find any beans of type org.springframework.mail.javamail.JavaMailSenderImpl (OnBeanCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.mail.javamail.JavaMailSenderImpl' (OnClassCondition)
+ - @ConditionalOnEnabledHealthIndicator management.health.defaults.enabled is considered true (OnEnabledHealthIndicatorCondition)
+
+ MailSenderAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.mail.internet.MimeMessage' (OnClassCondition)
+
+ MailSenderValidatorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.mail.test-connection) did not find property 'test-connection' (OnPropertyCondition)
+
+ ManagementContextAutoConfiguration.DifferentManagementContextConfiguration:
+ Did not match:
+ - Management Port actual port type (SAME) did not match required type (DIFFERENT) (OnManagementPortCondition)
+
+ ManagementWebSecurityAutoConfiguration:
+ Did not match:
+ - AllNestedConditions 1 matched 1 did not; NestedCondition on DefaultWebSecurityCondition.Beans @ConditionalOnMissingBean (types: org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter,org.springframework.security.web.SecurityFilterChain; SearchStrategy: all) found beans of type 'org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter' securityConfig; NestedCondition on DefaultWebSecurityCondition.Classes @ConditionalOnClass found required classes 'org.springframework.security.web.SecurityFilterChain', 'org.springframework.security.config.annotation.web.builders.HttpSecurity' (DefaultWebSecurityCondition)
+ Matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ MappingsEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.mappings.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ MetricsEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.metrics.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'io.micrometer.core.annotation.Timed' (OnClassCondition)
+
+ MongoAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.client.MongoClient' (OnClassCondition)
+
+ MongoDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.client.MongoClient' (OnClassCondition)
+
+ MongoHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.mongodb.core.MongoTemplate' (OnClassCondition)
+
+ MongoMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.MongoClientSettings' (OnClassCondition)
+
+ MongoReactiveAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.reactivestreams.client.MongoClient' (OnClassCondition)
+
+ MongoReactiveDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.reactivestreams.client.MongoClient' (OnClassCondition)
+
+ MongoReactiveHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.mongodb.core.ReactiveMongoTemplate' (OnClassCondition)
+
+ MongoReactiveRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.reactivestreams.client.MongoClient' (OnClassCondition)
+
+ MongoRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.mongodb.client.MongoClient' (OnClassCondition)
+
+ MustacheAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.samskivert.mustache.Mustache' (OnClassCondition)
+
+ MybatisAutoConfiguration#sqlSessionFactory:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.apache.ibatis.session.SqlSessionFactory; SearchStrategy: all) found beans of type 'org.apache.ibatis.session.SqlSessionFactory' sqlSessionFactory (OnBeanCondition)
+
+ MybatisAutoConfiguration.MapperScannerRegistrarNotFoundConfiguration:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.mybatis.spring.mapper.MapperFactoryBean,org.mybatis.spring.mapper.MapperScannerConfigurer; SearchStrategy: all) found beans of type 'org.mybatis.spring.mapper.MapperScannerConfigurer' com.os.framework.config.ApplicationConfig#MapperScannerRegistrar#0 (OnBeanCondition)
+
+ MybatisLanguageDriverAutoConfiguration.FreeMarkerConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'org.mybatis.scripting.freemarker.FreeMarkerLanguageDriver', 'org.mybatis.scripting.freemarker.FreeMarkerLanguageDriverConfig' (OnClassCondition)
+
+ MybatisLanguageDriverAutoConfiguration.LegacyFreeMarkerConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.mybatis.scripting.freemarker.FreeMarkerLanguageDriver' (OnClassCondition)
+
+ MybatisLanguageDriverAutoConfiguration.LegacyVelocityConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.mybatis.scripting.velocity.Driver' (OnClassCondition)
+
+ MybatisLanguageDriverAutoConfiguration.ThymeleafConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.mybatis.scripting.thymeleaf.ThymeleafLanguageDriver' (OnClassCondition)
+
+ MybatisLanguageDriverAutoConfiguration.VelocityConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'org.mybatis.scripting.velocity.VelocityLanguageDriver', 'org.mybatis.scripting.velocity.VelocityLanguageDriverConfig' (OnClassCondition)
+
+ Neo4jAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.neo4j.driver.Driver' (OnClassCondition)
+
+ Neo4jDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.neo4j.driver.Driver' (OnClassCondition)
+
+ Neo4jHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.neo4j.driver.Driver' (OnClassCondition)
+
+ Neo4jReactiveDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.neo4j.driver.Driver' (OnClassCondition)
+
+ Neo4jReactiveRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.neo4j.driver.Driver' (OnClassCondition)
+
+ Neo4jRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.neo4j.driver.Driver' (OnClassCondition)
+
+ NewRelicMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.newrelic.NewRelicMeterRegistry' (OnClassCondition)
+
+ NoOpCacheConfiguration:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.cache.CacheManager; SearchStrategy: all) found beans of type 'org.springframework.cache.CacheManager' cacheManager (OnBeanCondition)
+ Matched:
+ - Cache org.springframework.boot.autoconfigure.cache.NoOpCacheConfiguration automatic cache type (CacheCondition)
+
+ NoOpMeterRegistryConfiguration:
+ Did not match:
+ - @ConditionalOnMissingBean (types: io.micrometer.core.instrument.MeterRegistry; SearchStrategy: all) found beans of type 'io.micrometer.core.instrument.MeterRegistry' simpleMeterRegistry (OnBeanCondition)
+
+ OAuth2ClientAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.security.oauth2.client.registration.ClientRegistration' (OnClassCondition)
+
+ OAuth2ResourceServerAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.security.oauth2.server.resource.BearerTokenAuthenticationToken' (OnClassCondition)
+
+ OpenApiControllerWebFlux:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ OpenApiWebFluxConfiguration:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ ProjectInfoAutoConfiguration#buildProperties:
+ Did not match:
+ - @ConditionalOnResource did not find resource '${spring.info.build.location:classpath:META-INF/build-info.properties}' (OnResourceCondition)
+
+ ProjectInfoAutoConfiguration#gitProperties:
+ Did not match:
+ - GitResource did not find git info at classpath:git.properties (ProjectInfoAutoConfiguration.GitResourceAvailableCondition)
+
+ PrometheusMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.prometheus.PrometheusMeterRegistry' (OnClassCondition)
+
+ QuartzAutoConfiguration.JdbcStoreTypeConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.quartz.job-store-type=jdbc) did not find property 'job-store-type' (OnPropertyCondition)
+
+ QuartzEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.quartz.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.quartz.Scheduler' (OnClassCondition)
+
+ R2dbcAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.r2dbc.spi.ConnectionFactory' (OnClassCondition)
+
+ R2dbcDataAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.r2dbc.core.R2dbcEntityTemplate' (OnClassCondition)
+
+ R2dbcInitializationConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'io.r2dbc.spi.ConnectionFactory', 'org.springframework.r2dbc.connection.init.DatabasePopulator' (OnClassCondition)
+
+ R2dbcRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.r2dbc.spi.ConnectionFactory' (OnClassCondition)
+
+ R2dbcTransactionManagerAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.r2dbc.connection.R2dbcTransactionManager' (OnClassCondition)
+
+ RSocketMessagingAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.rsocket.RSocket' (OnClassCondition)
+
+ RSocketRequesterAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.rsocket.RSocket' (OnClassCondition)
+
+ RSocketSecurityAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.security.rsocket.core.SecuritySocketAcceptorInterceptor' (OnClassCondition)
+
+ RSocketServerAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.rsocket.core.RSocketServer' (OnClassCondition)
+
+ RSocketStrategiesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.rsocket.RSocket' (OnClassCondition)
+
+ RabbitAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.rabbitmq.client.Channel' (OnClassCondition)
+
+ RabbitHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.amqp.rabbit.core.RabbitTemplate' (OnClassCondition)
+
+ RabbitMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.rabbitmq.client.ConnectionFactory' (OnClassCondition)
+
+ ReactiveCloudFoundryActuatorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ ReactiveElasticsearchRepositoriesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient' (OnClassCondition)
+
+ ReactiveElasticsearchRestClientAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'reactor.netty.http.client.HttpClient' (OnClassCondition)
+
+ ReactiveManagementContextAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ ReactiveManagementWebSecurityAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ ReactiveOAuth2ClientAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.security.oauth2.client.registration.ClientRegistration' (OnClassCondition)
+
+ ReactiveOAuth2ResourceServerAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ ReactiveSecurityAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.config.WebFluxConfigurer' (OnClassCondition)
+
+ ReactiveUserDetailsServiceAutoConfiguration:
+ Did not match:
+ - AnyNestedCondition 0 matched 2 did not; NestedCondition on ReactiveUserDetailsServiceAutoConfiguration.ReactiveUserDetailsServiceCondition.ReactiveWebApplicationCondition did not find reactive web application classes; NestedCondition on ReactiveUserDetailsServiceAutoConfiguration.ReactiveUserDetailsServiceCondition.RSocketSecurityEnabledCondition @ConditionalOnBean (types: org.springframework.messaging.rsocket.annotation.support.RSocketMessageHandler; SearchStrategy: all) did not find any beans of type org.springframework.messaging.rsocket.annotation.support.RSocketMessageHandler (ReactiveUserDetailsServiceAutoConfiguration.ReactiveUserDetailsServiceCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.security.authentication.ReactiveAuthenticationManager' (OnClassCondition)
+
+ ReactiveWebServerFactoryAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ RedisAutoConfiguration#redisTemplate:
+ Did not match:
+ - @ConditionalOnMissingBean (names: redisTemplate; SearchStrategy: all) found beans named redisTemplate (OnBeanCondition)
+
+ RedisAutoConfiguration#stringRedisTemplate:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.data.redis.core.StringRedisTemplate; SearchStrategy: all) found beans of type 'org.springframework.data.redis.core.StringRedisTemplate' stringRedisTemplate (OnBeanCondition)
+
+ RedisCacheConfiguration:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.cache.CacheManager; SearchStrategy: all) found beans of type 'org.springframework.cache.CacheManager' cacheManager (OnBeanCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.data.redis.connection.RedisConnectionFactory' (OnClassCondition)
+ - Cache org.springframework.boot.autoconfigure.cache.RedisCacheConfiguration automatic cache type (CacheCondition)
+
+ RedisHealthContributorAutoConfiguration#redisHealthContributor:
+ Did not match:
+ - @ConditionalOnMissingBean (names: redisHealthIndicator,redisHealthContributor; SearchStrategy: all) found beans named redisHealthContributor (OnBeanCondition)
+
+ RedissonAutoConfiguration#redisTemplate:
+ Did not match:
+ - @ConditionalOnMissingBean (names: redisTemplate; SearchStrategy: all) found beans named redisTemplate (OnBeanCondition)
+
+ RedissonAutoConfiguration#redisson:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.redisson.api.RedissonClient; SearchStrategy: all) found beans of type 'org.redisson.api.RedissonClient' redisson (OnBeanCondition)
+
+ RepositoryRestMvcAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.rest.webmvc.config.RepositoryRestMvcConfiguration' (OnClassCondition)
+
+ Saml2RelyingPartyAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.security.saml2.provider.service.registration.RelyingPartyRegistrationRepository' (OnClassCondition)
+
+ ScheduledTasksEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.scheduledtasks.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ SecurityDataConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.security.data.repository.query.SecurityEvaluationContextExtension' (OnClassCondition)
+
+ SecurityRequestMatchersManagementContextConfiguration.JerseyRequestMatcherConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.glassfish.jersey.server.ResourceConfig' (OnClassCondition)
+
+ SendGridAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.sendgrid.SendGrid' (OnClassCondition)
+
+ ServletEndpointManagementContextConfiguration.JerseyServletEndpointManagementContextConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.glassfish.jersey.server.ResourceConfig' (OnClassCondition)
+
+ ServletManagementContextAutoConfiguration.ApplicationContextFilterConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (management.server.add-application-context-header=true) did not find property 'add-application-context-header' (OnPropertyCondition)
+
+ ServletWebServerFactoryAutoConfiguration.ForwardedHeaderFilterConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (server.forward-headers-strategy=framework) did not find property 'server.forward-headers-strategy' (OnPropertyCondition)
+
+ ServletWebServerFactoryConfiguration.EmbeddedJetty:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'org.eclipse.jetty.server.Server', 'org.eclipse.jetty.util.Loader', 'org.eclipse.jetty.webapp.WebAppContext' (OnClassCondition)
+
+ ServletWebServerFactoryConfiguration.EmbeddedUndertow:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'io.undertow.Undertow', 'org.xnio.SslClientAuthMode' (OnClassCondition)
+
+ SessionAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.session.Session' (OnClassCondition)
+
+ SessionsEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.session.FindByIndexNameSessionRepository' (OnClassCondition)
+
+ ShutdownEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.shutdown.enabled found so using endpoint default (OnAvailableEndpointCondition)
+
+ SignalFxMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.signalfx.SignalFxMeterRegistry' (OnClassCondition)
+
+ SimpleCacheConfiguration:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.cache.CacheManager; SearchStrategy: all) found beans of type 'org.springframework.cache.CacheManager' cacheManager (OnBeanCondition)
+ Matched:
+ - Cache org.springframework.boot.autoconfigure.cache.SimpleCacheConfiguration automatic cache type (CacheCondition)
+
+ SolrAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.apache.solr.client.solrj.impl.CloudSolrClient' (OnClassCondition)
+
+ SolrHealthContributorAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.apache.solr.client.solrj.SolrClient' (OnClassCondition)
+
+ SpringApplicationAdminJmxAutoConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.application.admin.enabled=true) did not find property 'enabled' (OnPropertyCondition)
+
+ SpringBootWebSecurityConfiguration:
+ Did not match:
+ - AllNestedConditions 1 matched 1 did not; NestedCondition on DefaultWebSecurityCondition.Beans @ConditionalOnMissingBean (types: org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter,org.springframework.security.web.SecurityFilterChain; SearchStrategy: all) found beans of type 'org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter' securityConfig; NestedCondition on DefaultWebSecurityCondition.Classes @ConditionalOnClass found required classes 'org.springframework.security.web.SecurityFilterChain', 'org.springframework.security.config.annotation.web.builders.HttpSecurity' (DefaultWebSecurityCondition)
+ Matched:
+ - found 'session' scope (OnWebApplicationCondition)
+
+ SpringDataRestConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.data.rest.core.config.RepositoryRestConfiguration' (OnClassCondition)
+
+ SpringfoxWebFluxConfiguration:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ StackdriverMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.stackdriver.StackdriverMeterRegistry' (OnClassCondition)
+
+ StartupEndpointAutoConfiguration:
+ Did not match:
+ - ApplicationStartup configured applicationStartup is of type class org.springframework.core.metrics.DefaultApplicationStartup, expected BufferingApplicationStartup. (StartupEndpointAutoConfiguration.ApplicationStartupCondition)
+
+ StatsdMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.micrometer.statsd.StatsdMeterRegistry' (OnClassCondition)
+
+ Swagger2ControllerWebFlux:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ Swagger2WebFluxConfiguration:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ SwaggerUiWebFluxConfiguration:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ TaskExecutionAutoConfiguration#applicationTaskExecutor:
+ Did not match:
+ - @ConditionalOnMissingBean (types: java.util.concurrent.Executor; SearchStrategy: all) found beans of type 'java.util.concurrent.Executor' threadPoolTaskExecutor, scheduledExecutorService (OnBeanCondition)
+
+ TaskSchedulingAutoConfiguration#taskScheduler:
+ Did not match:
+ - @ConditionalOnBean (names: org.springframework.context.annotation.internalScheduledAnnotationProcessor; SearchStrategy: all) did not find any beans named org.springframework.context.annotation.internalScheduledAnnotationProcessor (OnBeanCondition)
+
+ ThreadDumpEndpointAutoConfiguration:
+ Did not match:
+ - @ConditionalOnAvailableEndpoint no property management.endpoint.threaddump.enabled found so using endpoint default; @ConditionalOnAvailableEndpoint no 'management.endpoints' property marked it as exposed (OnAvailableEndpointCondition)
+
+ ThymeleafAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.thymeleaf.spring5.SpringTemplateEngine' (OnClassCondition)
+
+ TransactionAutoConfiguration#transactionalOperator:
+ Did not match:
+ - @ConditionalOnSingleCandidate (types: org.springframework.transaction.ReactiveTransactionManager; SearchStrategy: all) did not find any beans (OnBeanCondition)
+
+ TransactionAutoConfiguration.EnableTransactionManagementConfiguration.JdkDynamicAutoProxyConfiguration:
+ Did not match:
+ - @ConditionalOnProperty (spring.aop.proxy-target-class=false) did not find property 'proxy-target-class' (OnPropertyCondition)
+
+ UserDetailsServiceAutoConfiguration:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.security.authentication.AuthenticationManager,org.springframework.security.authentication.AuthenticationProvider,org.springframework.security.core.userdetails.UserDetailsService,org.springframework.security.oauth2.jwt.JwtDecoder,org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector,org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; SearchStrategy: all) found beans of type 'org.springframework.security.authentication.AuthenticationManager' authenticationManagerBean and found beans of type 'org.springframework.security.core.userdetails.UserDetailsService' userDetailsServiceImpl and found beans of type 'org.springframework.security.authentication.AuthenticationProvider' casAuthenticationProvider (OnBeanCondition)
+ Matched:
+ - @ConditionalOnClass found required class 'org.springframework.security.authentication.AuthenticationManager' (OnClassCondition)
+
+ WavefrontMetricsExportAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'com.wavefront.sdk.common.WavefrontSender' (OnClassCondition)
+
+ WebClientAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.function.client.WebClient' (OnClassCondition)
+
+ WebClientMetricsConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.function.client.WebClient' (OnClassCondition)
+
+ WebFluxAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.config.WebFluxConfigurer' (OnClassCondition)
+
+ WebFluxEndpointManagementContextConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.web.reactive.DispatcherHandler' (OnClassCondition)
+
+ WebFluxMetricsAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ WebFluxRequestHandlerProvider:
+ Did not match:
+ - did not find reactive web application classes (OnWebApplicationCondition)
+
+ WebMvcAutoConfiguration#hiddenHttpMethodFilter:
+ Did not match:
+ - @ConditionalOnProperty (spring.mvc.hiddenmethod.filter.enabled) did not find property 'enabled' (OnPropertyCondition)
+
+ WebMvcAutoConfiguration.EnableWebMvcConfiguration#localeResolver:
+ Did not match:
+ - @ConditionalOnMissingBean (names: localeResolver; SearchStrategy: all) found beans named localeResolver (OnBeanCondition)
+
+ WebMvcAutoConfiguration.ResourceChainCustomizerConfiguration:
+ Did not match:
+ - @ConditionalOnEnabledResourceChain did not find class org.webjars.WebJarAssetLocator (OnEnabledResourceChainCondition)
+
+ WebMvcAutoConfiguration.WebMvcAutoConfigurationAdapter#beanNameViewResolver:
+ Did not match:
+ - @ConditionalOnMissingBean (types: org.springframework.web.servlet.view.BeanNameViewResolver; SearchStrategy: all) found beans of type 'org.springframework.web.servlet.view.BeanNameViewResolver' beanNameViewResolver (OnBeanCondition)
+
+ WebServiceTemplateAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.ws.client.core.WebServiceTemplate' (OnClassCondition)
+
+ WebServicesAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.springframework.ws.transport.http.MessageDispatcherServlet' (OnClassCondition)
+
+ WebSocketMessagingAutoConfiguration.WebSocketMessageConverterConfiguration:
+ Did not match:
+ - @ConditionalOnBean (types: org.springframework.web.socket.config.annotation.DelegatingWebSocketMessageBrokerConfiguration,com.fasterxml.jackson.databind.ObjectMapper; SearchStrategy: all) did not find any beans of type org.springframework.web.socket.config.annotation.DelegatingWebSocketMessageBrokerConfiguration (OnBeanCondition)
+ Matched:
+ - @ConditionalOnClass found required classes 'com.fasterxml.jackson.databind.ObjectMapper', 'org.springframework.messaging.simp.config.AbstractMessageBrokerConfiguration' (OnClassCondition)
+
+ WebSocketReactiveAutoConfiguration:
+ Did not match:
+ - @ConditionalOnWebApplication did not find reactive web application classes (OnWebApplicationCondition)
+
+ WebSocketServletAutoConfiguration.Jetty10WebSocketConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required classes 'org.eclipse.jetty.websocket.javax.server.internal.JavaxWebSocketServerContainer', 'org.eclipse.jetty.websocket.server.JettyWebSocketServerContainer' (OnClassCondition)
+
+ WebSocketServletAutoConfiguration.JettyWebSocketConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'org.eclipse.jetty.websocket.jsr356.server.deploy.WebSocketServerContainerInitializer' (OnClassCondition)
+
+ WebSocketServletAutoConfiguration.UndertowWebSocketConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'io.undertow.websockets.jsr.Bootstrap' (OnClassCondition)
+
+ XADataSourceAutoConfiguration:
+ Did not match:
+ - @ConditionalOnClass did not find required class 'javax.transaction.TransactionManager' (OnClassCondition)
+
+
+Exclusions:
+-----------
+
+ org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration
+
+
+Unconditional classes:
+----------------------
+
+ org.springframework.boot.autoconfigure.context.ConfigurationPropertiesAutoConfiguration
+
+ org.springframework.boot.actuate.autoconfigure.availability.AvailabilityHealthContributorAutoConfiguration
+
+ org.springframework.boot.actuate.autoconfigure.info.InfoContributorAutoConfiguration
+
+ org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration
+
+ org.springframework.boot.autoconfigure.context.LifecycleAutoConfiguration
+
+ org.springframework.boot.actuate.autoconfigure.metrics.integration.IntegrationMetricsAutoConfiguration
+
+ org.springframework.boot.actuate.autoconfigure.endpoint.EndpointAutoConfiguration
+
+ org.springframework.boot.actuate.autoconfigure.web.server.ManagementContextAutoConfiguration
+
+ org.springframework.boot.actuate.autoconfigure.health.HealthContributorAutoConfiguration
+
+ cn.hutool.extra.spring.SpringUtil
+
+ org.springframework.boot.autoconfigure.availability.ApplicationAvailabilityAutoConfiguration
+
+ org.springframework.boot.autoconfigure.info.ProjectInfoAutoConfiguration
+
+
+
+16:28:32.409 [main] ERROR o.s.b.SpringApplication - [reportFailure,870] - Application run failed
+org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:163)
+ at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:585)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145)
+ at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:780)
+ at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:453)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:343)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1370)
+ at org.springframework.boot.SpringApplication.run(SpringApplication.java:1359)
+ at com.os.RuoYiApplication.main(RuoYiApplication.java:18)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:49)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:108)
+ at org.springframework.boot.loader.Launcher.launch(Launcher.java:58)
+ at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:88)
+Caused by: org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:142)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.(TomcatWebServer.java:104)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getTomcatWebServer(TomcatServletWebServerFactory.java:456)
+ at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getWebServer(TomcatServletWebServerFactory.java:204)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.createWebServer(ServletWebServerApplicationContext.java:182)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:160)
+ ... 16 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jwtAuthenticationTokenFilter': Unsatisfied dependency expressed through field 'tokenService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:214)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.getOrderedBeansOfType(ServletContextInitializerBeans.java:212)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:175)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAsRegistrationBean(ServletContextInitializerBeans.java:170)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addAdaptableBeans(ServletContextInitializerBeans.java:155)
+ at org.springframework.boot.web.servlet.ServletContextInitializerBeans.(ServletContextInitializerBeans.java:87)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.getServletContextInitializerBeans(ServletWebServerApplicationContext.java:260)
+ at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.selfInitialize(ServletWebServerApplicationContext.java:234)
+ at org.springframework.boot.web.embedded.tomcat.TomcatStarter.onStartup(TomcatStarter.java:53)
+ at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:4936)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardHost.startInternal(StandardHost.java:795)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1332)
+ at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1322)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+ at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75)
+ at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
+ at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:871)
+ at org.apache.catalina.core.StandardEngine.startInternal(StandardEngine.java:249)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardService.startInternal(StandardService.java:428)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.core.StandardServer.startInternal(StandardServer.java:914)
+ at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183)
+ at org.apache.catalina.startup.Tomcat.start(Tomcat.java:486)
+ at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:123)
+ ... 21 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'tokenService': Unsatisfied dependency expressed through field 'redisCache'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 63 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisCache': Unsatisfied dependency expressed through field 'redisTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:713)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:693)
+ at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:408)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 77 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redisTemplate' defined in class path resource [com/os/framework/config/RedisConfig.class]: Unsatisfied dependency expressed through method 'redisTemplate' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:710)
+ ... 91 common frames omitted
+Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'redissonConnectionFactory' defined in class path resource [org/redisson/spring/starter/RedissonAutoConfiguration.class]: Unsatisfied dependency expressed through method 'redissonConnectionFactory' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:794)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:532)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 104 common frames omitted
+Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'redisson' defined in class path resource [com/os/framework/config/RedissonConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:646)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:477)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
+ at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:336)
+ at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:334)
+ at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:209)
+ at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
+ at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
+ at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:904)
+ at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:781)
+ ... 118 common frames omitted
+Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.redisson.api.RedissonClient]: Factory method 'redisson' threw exception; nested exception is org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185)
+ at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:641)
+ ... 132 common frames omitted
+Caused by: org.redisson.client.RedisConnectionException: Unable to connect to Redis server: 127.0.0.1/127.0.0.1:6379
+ at org.redisson.connection.pool.ConnectionPool$1.lambda$run$0(ConnectionPool.java:158)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:557)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.connection.pool.ConnectionPool.promiseFailure(ConnectionPool.java:313)
+ at org.redisson.connection.pool.ConnectionPool.lambda$createConnection$3(ConnectionPool.java:279)
+ at org.redisson.misc.RedissonPromise.lambda$onComplete$0(RedissonPromise.java:183)
+ at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583)
+ at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559)
+ at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492)
+ at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636)
+ at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629)
+ at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118)
+ at org.redisson.misc.RedissonPromise.tryFailure(RedissonPromise.java:96)
+ at org.redisson.client.RedisClient$2$1.run(RedisClient.java:242)
+ at io.netty.util.concurrent.AbstractEventExecutor.runTask(AbstractEventExecutor.java:174)
+ at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:167)
+ at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:470)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:569)
+ at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
+ at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
+ at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
+ at java.lang.Thread.run(Thread.java:748)
+Caused by: org.redisson.client.RedisException: ERR Client sent AUTH, but no password is set. channel: [id: 0x1b018f6c, L:/127.0.0.1:38772 - R:127.0.0.1/127.0.0.1:6379] command: (AUTH), params: (password masked)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:370)
+ at org.redisson.client.handler.CommandDecoder.decodeCommand(CommandDecoder.java:198)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:137)
+ at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:113)
+ at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:529)
+ at io.netty.handler.codec.ReplayingDecoder.callDecode(ReplayingDecoder.java:366)
+ at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:290)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
+ at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
+ at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
+ at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
+ at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
+ at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
+ at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
+ at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
+ ... 4 common frames omitted
+[root@localhost tao_iot]#
+[root@localhost tao_iot]# systemctl status redis
+● redis.service - Redis In-Memory Data Store
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: activating (auto-restart) since Sun 2025-04-27 16:31:38 CST; 2s ago
+ Process: 9390 ExecStart=/media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf (code=exited, status=0/SUCCESS)
+ Main PID: 9390 (code=exited, status=0/SUCCESS)
+[root@localhost tao_iot]#
+[root@localhost src]# kill 6910
+[root@localhost src]# /media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf
+[root@localhost src]# redis-cli -h 10.42.0.1 -p 6379 ping
+(error) NOAUTH Authentication required.
+[root@localhost src]# redis-cli -h 127.0.0.1 -p 6379 ping
+(error) NOAUTH Authentication required.
+[root@localhost src]# systemctl status redis
+● redis.service - Redis In-Memory Data Store
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: activating (auto-restart) since Sun 2025-04-27 16:44:40 CST; 1s ago
+ Process: 10185 ExecStart=/media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf (code=exited, status=0/SUCCESS)
+ Main PID: 10185 (code=exited, status=0/SUCCESS)
+[root@localhost src]# systemctl status redis
+● redis.service - Redis In-Memory Data Store
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: activating (auto-restart) since Sun 2025-04-27 16:44:40 CST; 1s ago
+ Process: 10185 ExecStart=/media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf (code=exited, status=0/SUCCESS)
+ Main PID: 10185 (code=exited, status=0/SUCCESS)
+[root@localhost src]# redis-cli -h 10.42.0.1 -p 6379 -a haiwei@123 ping
+Warning: Using a password with '-a' or '-u' option on the command line interface may not be safe.
+PONG
+[root@localhost src]#
+
+[root@localhost tao_iot]# vi /media/nginx-server/nginx-1.28.0/conf/nginx.conf
+[root@localhost tao_iot]# firewall-cmd --permanent --add-port=6061/tcp
+success
+[root@localhost tao_iot]# firewall-cmd --permanent --add-port=8020/tcp
+success
+[root@localhost tao_iot]# firewall-cmd --reload
+success
+[root@localhost tao_iot]# rm dist.zip
+rm:是否删除普通文件 'dist.zip'?y
+[root@localhost tao_iot]# firewall-cmd --reload
+success
+[root@localhost tao_iot]# systemctl daemon-reload
+[root@localhost tao_iot]# systemctl status nginx
+● nginx.service - nginx service
+ Loaded: loaded (/usr/lib/systemd/system/nginx.service; enabled; vendor preset: disabled)
+ Active: active (running) since Sun 2025-04-27 15:18:06 CST; 2h 20min ago
+ Main PID: 1953 (nginx)
+ Tasks: 2
+ Memory: 7.6M
+ CGroup: /system.slice/nginx.service
+ ├─1953 nginx: master process /usr/local/nginx/sbin/nginx
+ └─1954 nginx: worker process
+
+4月 27 15:18:06 localhost.localdomain systemd[1]: Starting nginx service...
+4月 27 15:18:06 localhost.localdomain systemd[1]: Started nginx service.
+[root@localhost tao_iot]# [root@localhost tao_iot]# getenforce
+Disabled
+[root@localhost tao_iot]# curl -I http://localhost:6061
+curl: (7) Failed to connect to localhost port 6061: 拒绝连接
+[root@localhost tao_iot]# chown -R nginx:nginx /media/tao_iot/dist
+[root@localhost tao_iot]# chmod -R 755 /media/tao_iot/dist
+[root@localhost tao_iot]# curl -I http://localhost:6061
+curl: (7) Failed to connect to localhost port 6061: 拒绝连接
+[root@localhost tao_iot]# getenforce
+Disabled
+[root@localhost tao_iot]# systemctl status nginx
+● nginx.service - nginx service
+ Loaded: loaded (/usr/lib/systemd/system/nginx.service; enabled; vendor preset: disabled)
+ Active: active (running) since Sun 2025-04-27 15:18:06 CST; 2h 20min ago
+ Main PID: 1953 (nginx)
+ Tasks: 2
+ Memory: 7.6M
+ CGroup: /system.slice/nginx.service
+ ├─1953 nginx: master process /usr/local/nginx/sbin/nginx
+ └─1954 nginx: worker process
+
+4月 27 15:18:06 localhost.localdomain systemd[1]: Starting nginx service...
+4月 27 15:18:06 localhost.localdomain systemd[1]: Started nginx service.
+[root@localhost tao_iot]# ^C
+[root@localhost tao_iot]# getenforce
+Disabled
+[root@localhost tao_iot]# curl -I http://localhost:6061
+curl: (7) Failed to connect to localhost port 6061: 拒绝连接
+[root@localhost tao_iot]# chown -R nginx:nginx /media/tao_iot/dist
+[root@localhost tao_iot]# chmod -R 755 /media/tao_iot/dist
+[root@localhost tao_iot]# curl -I http://localhost:6061
+curl: (7) Failed to connect to localhost port 6061: 拒绝连接
+[root@localhost tao_iot]# getenforce
+Disabled
+[root@localhost tao_iot]# netstat -tulnp | grep nginx
+tcp 0 0 0.0.0.0:80 0.0.0.0:* LISTEN 1953/nginx: master
+[root@localhost tao_iot]# ss -tulnp | grep nginx
+tcp LISTEN 0 511 0.0.0.0:80 0.0.0.0:* users:(("nginx",pid=1954,fd=9),("nginx",pid=1953,fd=9))
+[root@localhost tao_iot]# nginx -t
+-bash: nginx:未找到命令
+[root@localhost tao_iot]# firewall-cmd --list-all
+public (active)
+ target: default
+ icmp-block-inversion: no
+ interfaces: enp12s0f0 enp5s0u2
+ sources:
+ services: cockpit dhcpv6-client mdns ssh
+ ports: 2379/tcp 8084/tcp 4000/tcp 7001/tcp 6379/tcp 6061/tcp 8020/tcp
+ protocols:
+ masquerade: no
+ forward-ports:
+ source-ports:
+ icmp-blocks:
+ rich rules:
+
+[root@localhost tao_iot]# firewall-cmd --permanent --add-port=6061/tcp
+Warning: ALREADY_ENABLED: 6061:tcp
+success
+[root@localhost tao_iot]# curl -I http://10.42.0.1:6061
+curl: (7) Failed to connect to 10.42.0.1 port 6061: 拒绝连接
+[root@localhost tao_iot]# lsof -i :6061
+[root@localhost tao_iot]#
\ No newline at end of file
diff --git a/麒麟系统环境部署命令/4.txt b/麒麟系统环境部署命令/4.txt
new file mode 100644
index 0000000..723d4f3
--- /dev/null
+++ b/麒麟系统环境部署命令/4.txt
@@ -0,0 +1,490 @@
+
+Authorized users only. All activities may be monitored and reported.
+Activate the web console with: systemctl enable --now cockpit.socket
+
+Last login: Sun Apr 27 17:57:43 2025 from 10.42.0.12
+[root@localhost ~]# tiup cluster display tidb-cluster
+Cluster type: tidb
+Cluster name: tidb-cluster
+Cluster version: v8.5.1
+Deploy user: tidb
+SSH type: builtin
+Dashboard URL: http://10.42.0.1:2379/dashboard
+Grafana URL: http://10.42.0.1:3000
+ID Role Host Ports OS/Arch Status Data Dir Deploy Dir
+-- ---- ---- ----- ------- ------ -------- ----------
+10.42.0.1:9093 alertmanager 10.42.0.1 9093/9094 linux/aarch64 Up /tidb-data/alertmanager-9093 /tidb-deploy/alertmanager-9093
+10.42.0.1:3000 grafana 10.42.0.1 3000 linux/aarch64 Up - /tidb-deploy/grafana-3000
+10.42.0.1:2379 pd 10.42.0.1 2379/2380 linux/aarch64 Up|L|UI /tidb-data/pd-2379 /tidb-deploy/pd-2379
+10.42.0.1:9090 prometheus 10.42.0.1 9090/12020 linux/aarch64 Up /tidb-data/prometheus-9090 /tidb-deploy/prometheus-9090
+10.42.0.1:4000 tidb 10.42.0.1 4000/10080 linux/aarch64 Up - /tidb-deploy/tidb-4000
+10.42.0.1:20160 tikv 10.42.0.1 20160/20180 linux/aarch64 Up /tidb-data/tikv-20160 /tidb-deploy/tikv-20160
+Total nodes: 6
+[root@localhost ~]# ps -ef | grep tidb
+tidb 2005 1 0 13:46 ? 00:00:00 bin/alertmanager/alertmanager --config.file=conf/alertmanager.yml --storage.path=/tidb-data/alertmanager-9093 --data.retention=120h --log.level=info --web.listen-address=0.0.0.0:9093 --web.external-url=http://10.42.0.1:9093 --cluster.peer=10.42.0.1:9094 --cluster.listen-address=10.42.0.1:9094
+tidb 2006 1 0 13:46 ? 00:00:00 bin/blackbox_exporter/blackbox_exporter --web.listen-address=:9115 --log.level=info --config.file=conf/blackbox.yml
+tidb 2007 1 5 13:46 ? 00:00:14 bin/bin/grafana-server --homepath=/tidb-deploy/grafana-3000/bin --config=/tidb-deploy/grafana-3000/conf/grafana.ini
+tidb 2009 1 0 13:46 ? 00:00:01 bin/node_exporter/node_exporter --web.listen-address=:9100 --collector.tcpstat --collector.mountstats --collector.meminfo_numa --collector.buddyinfo --collector.vmstat.fields=^.* --log.level=info
+tidb 2010 1 22 13:46 ? 00:00:54 bin/pd-server --name=pd-10.42.0.1-2379 --client-urls=http://0.0.0.0:2379 --advertise-client-urls=http://10.42.0.1:2379 --peer-urls=http://0.0.0.0:2380 --advertise-peer-urls=http://10.42.0.1:2380 --data-dir=/tidb-data/pd-2379 --initial-cluster=pd-10.42.0.1-2379=http://10.42.0.1:2380 --config=conf/pd.toml --log-file=/tidb-deploy/pd-2379/log/pd.log
+tidb 2011 1 24 13:46 ? 00:00:58 bin/prometheus/prometheus --config.file=/tidb-deploy/prometheus-9090/conf/prometheus.yml --web.listen-address=:9090 --web.external-url=http://10.42.0.1:9090/ --web.enable-admin-api --log.level=info --storage.tsdb.path=/tidb-data/prometheus-9090 --storage.tsdb.retention=30d
+tidb 2013 1 6 13:46 ? 00:00:16 bin/tidb-server -P 4000 --status=10080 --host=0.0.0.0 --advertise-address=10.42.0.1 --store=tikv --initialize-insecure --path=10.42.0.1:2379 --log-slow-query=/tidb-deploy/tidb-4000/log/tidb_slow_query.log --config=conf/tidb.toml --log-file=/tidb-deploy/tidb-4000/log/tidb.log
+tidb 2014 1 10 13:46 ? 00:00:24 bin/tikv-server --addr 0.0.0.0:20160 --advertise-addr 10.42.0.1:20160 --status-addr 0.0.0.0:20180 --advertise-status-addr 10.42.0.1:20180 --pd 10.42.0.1:2379 --data-dir /tidb-data/tikv-20160 --config conf/tikv.toml --log-file /tidb-deploy/tikv-20160/log/tikv.log
+tidb 2018 2005 0 13:46 ? 00:00:00 /bin/bash /tidb-deploy/alertmanager-9093/scripts/run_alertmanager.sh
+tidb 2019 2018 0 13:46 ? 00:00:00 tee -i -a /tidb-deploy/alertmanager-9093/log/alertmanager.log
+tidb 2024 2006 0 13:46 ? 00:00:00 /bin/bash /tidb-deploy/monitor-9100/scripts/run_blackbox_exporter.sh
+tidb 2026 2009 0 13:46 ? 00:00:00 /bin/bash /tidb-deploy/monitor-9100/scripts/run_node_exporter.sh
+tidb 2027 2024 0 13:46 ? 00:00:00 tee -i -a /tidb-deploy/monitor-9100/log/blackbox_exporter.log
+tidb 2028 2026 0 13:46 ? 00:00:00 tee -i -a /tidb-deploy/monitor-9100/log/node_exporter.log
+tidb 2030 2011 0 13:46 ? 00:00:00 /bin/bash scripts/ng-wrapper.sh
+tidb 2031 2011 0 13:46 ? 00:00:00 /bin/bash /tidb-deploy/prometheus-9090/scripts/run_prometheus.sh
+tidb 2032 2031 0 13:46 ? 00:00:00 tee -i -a /tidb-deploy/prometheus-9090/log/prometheus.log
+tidb 2035 2030 1 13:46 ? 00:00:04 bin/ng-monitoring-server --config /tidb-deploy/prometheus-9090/conf/ngmonitoring.toml
+root 4537 4346 0 13:51 pts/0 00:00:00 grep tidb
+[root@localhost ~]# systemctl status redis
+● redis.service - Redis In-Memory Data Store
+ Loaded: loaded (/etc/systemd/system/redis.service; enabled; vendor preset: disabled)
+ Active: active (running) since Tue 2025-04-29 13:46:59 CST; 4min 11s ago
+ Process: 1981 ExecStart=/media/redis/redis-5.0.5/src/redis-server /media/redis/redis-5.0.5/redis.conf > Main PID: 1989 (redis-server)
+ Tasks: 4
+ Memory: 13.8M
+ CGroup: /system.slice/redis.service
+ └─1989 /media/redis/redis-5.0.5/src/redis-server 127.0.0.1:6379
+
+4月 29 13:46:59 localhost.localdomain systemd[1]: Starting Redis In-Memory Data Store...
+4月 29 13:46:59 localhost.localdomain systemd[1]: Started Redis In-Memory Data Store.
+lines 1-12/12 (END)
+
+[root@localhost ~]# rm /media/tao_iot/dist
+rm: 无法删除 '/media/tao_iot/dist': 是一个目录
+[root@localhost ~]# systemctl status nginx
+● nginx.service - nginx service
+ Loaded: loaded (/usr/lib/systemd/system/nginx.service; enabled; vendor preset: disabled)
+ Active: active (running) since Tue 2025-04-29 13:46:59 CST; 7min ago
+ Process: 1980 ExecStart=/usr/local/nginx/sbin/nginx (code=exited, status=0/SUCCESS)
+ Main PID: 1988 (nginx)
+ Tasks: 2
+ Memory: 7.5M
+ CGroup: /system.slice/nginx.service
+ ├─1988 nginx: master process /usr/local/nginx/sbin/nginx
+ └─1993 nginx: worker process
+
+4月 29 13:46:59 localhost.localdomain systemd[1]: Starting nginx service...
+4月 29 13:46:59 localhost.localdomain systemd[1]: Started nginx service.
+[root@localhost ~]# rm -r /media/tao_iot/dist
+rm:是否进入目录'/media/tao_iot/dist'? y
+rm:是否删除普通文件 '/media/tao_iot/dist/favicon.ico'?y
+rm:是否进入目录'/media/tao_iot/dist/html'? y
+rm:是否删除普通文件 '/media/tao_iot/dist/html/ie.html'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/html/ie.html.gz'?y
+rm:是否删除目录 '/media/tao_iot/dist/html'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/index.html'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/index.html.gz'?y
+rm:是否进入目录'/media/tao_iot/dist/model'? y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/daxingjichang.mtl'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/daxingjichang.obj'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/jiaodongjichang.mtl'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/jiaodongjichang.obj'?y
+rm:是否进入目录'/media/tao_iot/dist/model/maps'? y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/maps/3d66Model-1176892-files-21.JPG'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/maps/3d66Model-1176892-files-23.jpg'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/maps/3d66Model-1176892-files-3.jpg'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/model/maps/wenli2.png'?y
+rm:是否删除目录 '/media/tao_iot/dist/model/maps'?y
+rm:是否删除目录 '/media/tao_iot/dist/model'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/robots.txt'?y
+rm:是否进入目录'/media/tao_iot/dist/static'? y
+rm:是否进入目录'/media/tao_iot/dist/static/css'? y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/app.94234889.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/app.94234889.css.gz'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-05d75466.9dad4dc5.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-05d75466.9dad4dc5.css.gz'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-082d8c34.49e9b769.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-082d8c34.49e9b769.css.gz'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-10f4b0e8.772dd061.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-10f4b0e8.772dd061.css.gz'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-17ee1abe.ecea2c5f.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-17ee1abe.ecea2c5f.css.gz'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-18f96050.03c3a260.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-2025613a.d63b57ad.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-26395caf.470fc1fa.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-26395caf.470fc1fa.css.gz'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-291854d3.da0f15ef.css'?y
+rm:是否删除普通文件 '/media/tao_iot/dist/static/css/chunk-291854d3.da0f15ef.css.gz'?^C
+[root@localhost ~]# rm -rf /media/tao_iot/dist
+[root@localhost ~]# rm -rf /media/tao_iot/haiwei-admin.jar
+[root@localhost ~]# mkdir -p /media/tao_iot/dist
+[root@localhost ~]# unzip -oq dist.zip -d /media/tao_iot/dist
+unzip: cannot find or open dist.zip, dist.zip.zip or dist.zip.ZIP.
+[root@localhost ~]# cd /media/tao_iot
+[root@localhost tao_iot]# unzip -oq dist.zip -d /media/tao_iot/dist
+[root@localhost tao_iot]# systemctl status nginx
+● nginx.service - nginx service
+ Loaded: loaded (/usr/lib/systemd/system/nginx.service; enabled; vendor preset: disabled)
+ Active: active (running) since Tue 2025-04-29 13:46:59 CST; 29min ago
+ Process: 1980 ExecStart=/usr/local/nginx/sbin/nginx (code=exited, status=0/SUCCESS)
+ Main PID: 1988 (nginx)
+ Tasks: 2
+ Memory: 9.5M
+ CGroup: /system.slice/nginx.service
+ ├─1988 nginx: master process /usr/local/nginx/sbin/nginx
+ └─1993 nginx: worker process
+
+4月 29 13:46:59 localhost.localdomain systemd[1]: Starting nginx service...
+4月 29 13:46:59 localhost.localdomain systemd[1]: Started nginx service.
+[root@localhost tao_iot]# systemctl daemon-reload
+[root@localhost tao_iot]# cat /usr/local/nginx/conf/nginx.conf
+
+#user nobody;
+worker_processes 1;
+
+#error_log logs/error.log;
+#error_log logs/error.log notice;
+#error_log logs/error.log info;
+
+#pid logs/nginx.pid;
+
+
+events {
+ worker_connections 1024;
+}
+
+
+http {
+ include mime.types;
+ default_type application/octet-stream;
+
+ #log_format main '$remote_addr - $remote_user [$time_local] "$request" '
+ # '$status $body_bytes_sent "$http_referer" '
+ # '"$http_user_agent" "$http_x_forwarded_for"';
+
+ #access_log logs/access.log main;
+
+ sendfile on;
+ #tcp_nopush on;
+
+ #keepalive_timeout 0;
+ keepalive_timeout 65;
+
+ #gzip on;
+ #gzip on;
+ # 开启gzip压缩
+ gzip on;
+ # 不压缩临界值,大于1K的才压缩,一般不用改
+ gzip_min_length 1k;
+ # 压缩缓冲区
+ gzip_buffers 16 64K;
+ # 压缩版本(默认1.1,前端如果是squid2.5请使用1.0)
+ gzip_http_version 1.1;
+    # 压缩级别,1-9,数字越大压缩的越好,时间也越长
+ gzip_comp_level 3;
+ # 进行压缩的文件类型
+ gzip_types text/plain application/x-javascript text/css application/xml application/javascript;
+ # 跟Squid等缓存服务有关,on的话会在Header里增加"Vary: Accept-Encoding"
+ gzip_vary on;
+ # IE6对Gzip不怎么友好,不给它Gzip了
+ gzip_disable "MSIE [1-6]\.";
+
+ server {
+ listen 6061;
+ server_name localhost;
+
+ #charset koi8-r;
+
+ #access_log logs/host.access.log main;
+
+ location / {
+ root /media/tao_iot/dist;
+ index index.html index.htm;
+ }
+
+ #error_page 404 /404.html;
+
+ # redirect server error pages to the static page /50x.html
+ #
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root html;
+ }
+
+ location /prod-api/ {
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header REMOTE-HOST $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-NginX-Proxy true;
+ proxy_pass http://127.0.0.1:8020/;
+
+ }
+
+ # proxy the PHP scripts to Apache listening on 127.0.0.1:80
+ #
+ #location ~ \.php$ {
+ # proxy_pass http://127.0.0.1;
+ #}
+
+ # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
+ #
+ #location ~ \.php$ {
+ # root html;
+ # fastcgi_pass 127.0.0.1:9000;
+ # fastcgi_index index.php;
+ # fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
+ # include fastcgi_params;
+ #}
+
+ # deny access to .htaccess files, if Apache's document root
+ # concurs with nginx's one
+ #
+ #location ~ /\.ht {
+ # deny all;
+ #}
+ }
+
+
+ # another virtual host using mix of IP-, name-, and port-based configuration
+ #
+ #server {
+ # listen 8000;
+ # listen somename:8080;
+ # server_name somename alias another.alias;
+
+ # location / {
+ # root html;
+ # index index.html index.htm;
+ # }
+ #}
+
+
+ # HTTPS server
+ #
+ #server {
+ # listen 443 ssl;
+ # server_name localhost;
+
+ # ssl_certificate cert.pem;
+ # ssl_certificate_key cert.key;
+
+ # ssl_session_cache shared:SSL:1m;
+ # ssl_session_timeout 5m;
+
+ # ssl_ciphers HIGH:!aNULL:!MD5;
+ # ssl_prefer_server_ciphers on;
+
+ # location / {
+ # root html;
+ # index index.html index.htm;
+ # }
+ #}
+
+}
+[root@localhost tao_iot]# /usr/local/nginx/sbin/nginx -s reload # 重新加载配置
+[root@localhost tao_iot]# ps aux | grep nginx
+root 1988 0.0 0.0 20224 3648 ? Ss 13:46 0:00 nginx: master process /usr/local/nginx/sbin/nginx
+nobody 5920 0.0 0.0 32256 4736 ? S 14:21 0:00 nginx: worker process
+root 6244 0.0 0.0 214144 1600 pts/0 S+ 14:22 0:00 grep nginx
+[root@localhost tao_iot]# ls -la /media/tao_iot/dist/index.html
+-rw-r--r-- 1 root root 16667 4月 29 13:49 /media/tao_iot/dist/index.html
+[root@localhost tao_iot]# getenforce
+Disabled
+[root@localhost tao_iot]# vi /etc/systemd/system/haiwei-admin.service
+E325: ATTENTION
+Found a swap file by the name "/etc/systemd/system/.haiwei-admin.service.swp"
+ owned by: root dated: 日 4月 27 17:57:40 2025
+ file name: /etc/systemd/system/haiwei-admin.service
+ modified: YES
+ user name: root host name: localhost.localdomain
+ process ID: 14448
+While opening file "/etc/systemd/system/haiwei-admin.service"
+ dated: 二 4月 29 14:25:17 2025
+ NEWER than swap file!
+
+(1) Another program may be editing the same file. If this is the case,
+ be careful not to end up with two different instances of the same
+ file when making changes. Quit, or continue with caution.
+(2) An edit session for this file crashed.
+ If this is the case, use ":recover" or "vim -r /etc/systemd/system/haiwei-admin.service"
+ to recover the changes (see ":help recovery").
+ If you did this already, delete the swap file "/etc/systemd/system/.haiwei-admin.service.swp"
+ to avoid this message.
+"/etc/systemd/system/haiwei-admin.service" 15L, 277C
+Press ENTER or type command to continue
+
+[root@localhost tao_iot]# vi /etc/systemd/system/haiwei-admin.service
+[root@localhost tao_iot]# rm /etc/systemd/system/.haiwei-admin.service.swp
+rm:是否删除普通文件 '/etc/systemd/system/.haiwei-admin.service.swp'?y
+[root@localhost tao_iot]# vi /etc/systemd/system/haiwei-admin.service
+[root@localhost tao_iot]# vi /etc/systemd/system/haiwei-admin.service
+[root@localhost tao_iot]# chmod 644 /etc/systemd/system/haiwei-admin.service
+[root@localhost tao_iot]# systemctl daemon-reload
+[root@localhost tao_iot]# systemctl enable haiwei-admin.service
+Created symlink /etc/systemd/system/multi-user.target.wants/haiwei-admin.service → /etc/systemd/system/haiwei-admin.service.
+[root@localhost tao_iot]# systemctl status haiwei-admin.service
+● haiwei-admin.service - Haiwei Admin Service
+ Loaded: loaded (/etc/systemd/system/haiwei-admin.service; enabled; vendor preset: disabled)
+ Active: inactive (dead)
+[root@localhost tao_iot]# systemctl start haiwei-admin.service
+[root@localhost tao_iot]# systemctl status haiwei-admin.service
+● haiwei-admin.service - Haiwei Admin Service
+ Loaded: loaded (/etc/systemd/system/haiwei-admin.service; enabled; vendor preset: disabled)
+ Active: active (running) since Tue 2025-04-29 14:28:55 CST; 4s ago
+ Main PID: 6399 (java)
+ Tasks: 112
+ Memory: 481.6M
+ CGroup: /system.slice/haiwei-admin.service
+ └─6399 /usr/bin/java -jar /media/tao_iot/haiwei-admin.jar
+
+4月 29 14:28:57 localhost.localdomain java[6399]: // \ \ `-. \_ __\ /__ _/ .-` / / >4月 29 14:28:57 localhost.localdomain java[6399]: // ========`-.____`-.___\_____/___.-`____.-'=====>4月 29 14:28:57 localhost.localdomain java[6399]: // `=---=' >4月 29 14:28:57 localhost.localdomain java[6399]: // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^>4月 29 14:28:57 localhost.localdomain java[6399]: // 佛祖保佑 永不宕机 永无BUG >4月 29 14:28:57 localhost.localdomain java[6399]: //////////////////////////////////////////////////////>4月 29 14:28:57 localhost.localdomain java[6399]: 14:28:57.724 [main] INFO c.o.RuoYiApplication - [logS>4月 29 14:28:57 localhost.localdomain java[6399]: 14:28:57.731 [main] DEBUG c.o.RuoYiApplication - [logS>4月 29 14:28:57 localhost.localdomain java[6399]: 14:28:57.732 [background-preinit] INFO o.h.v.i.util.V>4月 29 14:28:57 localhost.localdomain java[6399]: 14:28:57.733 [main] INFO c.o.RuoYiApplication - [logS>
+
+[root@localhost tao_iot]# cat /usr/local/nginx/conf/nginx.conf
+
+#user nobody;
+worker_processes 1;
+
+#error_log logs/error.log;
+#error_log logs/error.log notice;
+#error_log logs/error.log info;
+
+#pid logs/nginx.pid;
+
+
+events {
+ worker_connections 1024;
+}
+
+
+http {
+ include mime.types;
+ default_type application/octet-stream;
+
+ #log_format main '$remote_addr - $remote_user [$time_local] "$request" '
+ # '$status $body_bytes_sent "$http_referer" '
+ # '"$http_user_agent" "$http_x_forwarded_for"';
+
+ #access_log logs/access.log main;
+
+ sendfile on;
+ #tcp_nopush on;
+
+ #keepalive_timeout 0;
+ keepalive_timeout 65;
+
+ #gzip on;
+ #gzip on;
+ # 开启gzip压缩
+ gzip on;
+ # 不压缩临界值,大于1K的才压缩,一般不用改
+ gzip_min_length 1k;
+ # 压缩缓冲区
+ gzip_buffers 16 64K;
+ # 压缩版本(默认1.1,前端如果是squid2.5请使用1.0)
+ gzip_http_version 1.1;
+    # 压缩级别,1-9,数字越大压缩的越好,时间也越长
+ gzip_comp_level 3;
+ # 进行压缩的文件类型
+ gzip_types text/plain application/x-javascript text/css application/xml application/javascript;
+ # 跟Squid等缓存服务有关,on的话会在Header里增加"Vary: Accept-Encoding"
+ gzip_vary on;
+ # IE6对Gzip不怎么友好,不给它Gzip了
+ gzip_disable "MSIE [1-6]\.";
+
+ server {
+ listen 6061;
+ server_name localhost;
+
+ #charset koi8-r;
+
+ #access_log logs/host.access.log main;
+
+ location / {
+ root /media/tao_iot/dist;
+ index index.html index.htm;
+ }
+
+ #error_page 404 /404.html;
+
+ # redirect server error pages to the static page /50x.html
+ #
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root html;
+ }
+
+ location /prod-api/ {
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header REMOTE-HOST $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-NginX-Proxy true;
+ proxy_pass http://127.0.0.1:8020/;
+
+ }
+
+ # proxy the PHP scripts to Apache listening on 127.0.0.1:80
+ #
+ #location ~ \.php$ {
+ # proxy_pass http://127.0.0.1;
+ #}
+
+ # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
+ #
+ #location ~ \.php$ {
+ # root html;
+ # fastcgi_pass 127.0.0.1:9000;
+ # fastcgi_index index.php;
+ # fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
+ # include fastcgi_params;
+ #}
+
+ # deny access to .htaccess files, if Apache's document root
+ # concurs with nginx's one
+ #
+ #location ~ /\.ht {
+ # deny all;
+ #}
+ }
+
+
+ # another virtual host using mix of IP-, name-, and port-based configuration
+ #
+ #server {
+ # listen 8000;
+ # listen somename:8080;
+ # server_name somename alias another.alias;
+
+ # location / {
+ # root html;
+ # index index.html index.htm;
+ # }
+ #}
+
+
+ # HTTPS server
+ #
+ #server {
+ # listen 443 ssl;
+ # server_name localhost;
+
+ # ssl_certificate cert.pem;
+ # ssl_certificate_key cert.key;
+
+ # ssl_session_cache shared:SSL:1m;
+ # ssl_session_timeout 5m;
+
+ # ssl_ciphers HIGH:!aNULL:!MD5;
+ # ssl_prefer_server_ciphers on;
+
+ # location / {
+ # root html;
+ # index index.html index.htm;
+ # }
+ #}
+
+}
+[root@localhost tao_iot]#
+[root@localhost nginx]# /usr/local/nginx/sbin/nginx -t # 检查配置是否有语法错误
+nginx: the configuration file /usr/local/nginx/conf/nginx.conf syntax is ok
+nginx: configuration file /usr/local/nginx/conf/nginx.conf test is successful
+[root@localhost nginx]# /usr/local/nginx/sbin/nginx -s reload # 重新加载配置
+[root@localhost nginx]#
\ No newline at end of file