Notes on Deploying the Milvus Vector Database on K8s


A record of setting up the Milvus vector database inside a k8s cluster.

K8s high-availability cluster setup and functional testing

  • AI is all the rage, so I figured I'd join in. Milvus is a vector database getting a lot of attention right now, so let's stand one up and take a look.

Environment

  • All k8s nodes run Debian GNU/Linux 12 (bookworm) with kernel 6.1.0-18-amd64. The cluster deployment itself is not repeated here; see the earlier article.
Hostname IP CPU/Memory Disks
k8s-master1 192.168.1.151 4C8G 2 * 50G
k8s-master2 192.168.1.152 4C8G 2 * 50G
k8s-master3 192.168.1.153 4C8G 2 * 50G
k8s-node1 192.168.1.154 4C8G 2 * 50G
k8s-node2 192.168.1.155 4C8G 2 * 50G
k8s-node3 192.168.1.156 4C8G 2 * 50G
k8s-nfs 192.168.1.160 4C8G 1 * 50G + 1 * 200G

Deploy k8s and prepare NFS

root@k8s-master1:~# kubectl get nodes -owide
NAME          STATUS   ROLES           AGE   VERSION   INTERNAL-IP     EXTERNAL-IP   OS-IMAGE                         KERNEL-VERSION   CONTAINER-RUNTIME
k8s-master1   Ready    control-plane   13d   v1.34.3   192.168.1.151   <none>        Debian GNU/Linux 12 (bookworm)   6.1.0-18-amd64   containerd://2.2.1
k8s-master2   Ready    control-plane   13d   v1.34.3   192.168.1.152   <none>        Debian GNU/Linux 12 (bookworm)   6.1.0-18-amd64   containerd://2.2.1
k8s-master3   Ready    control-plane   13d   v1.34.3   192.168.1.153   <none>        Debian GNU/Linux 12 (bookworm)   6.1.0-18-amd64   containerd://2.2.1
k8s-node1     Ready    worker          13d   v1.34.3   192.168.1.154   <none>        Debian GNU/Linux 12 (bookworm)   6.1.0-18-amd64   containerd://2.2.1
k8s-node2     Ready    worker          13d   v1.34.3   192.168.1.155   <none>        Debian GNU/Linux 12 (bookworm)   6.1.0-18-amd64   containerd://2.2.1
k8s-node3     Ready    worker          13d   v1.34.3   192.168.1.156   <none>        Debian GNU/Linux 12 (bookworm)   6.1.0-18-amd64   containerd://2.2.1
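
The NFS server side is not shown here; judging from the provisioner's settings further down (NFS_SERVER=192.168.1.160, NFS_PATH=/media/nfs), the export on k8s-nfs presumably looks like the following sketch, assuming the 200G data disk is mounted at /media/nfs:

# on k8s-nfs (192.168.1.160) -- assumed setup, reconstructed from the provisioner settings below
apt install -y nfs-kernel-server
mkdir -p /media/nfs                      # mount point for the 200G data disk
echo '/media/nfs 192.168.1.0/24(rw,sync,no_subtree_check,no_root_squash)' >> /etc/exports
exportfs -ra                             # re-export
showmount -e localhost                   # verify the export is visible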



root@k8s-master1:~# kubectl get -f rbac.yaml -o wide
NAME                                    SECRETS   AGE
serviceaccount/nfs-client-provisioner   0         13d

NAME                                                                  CREATED AT
clusterrole.rbac.authorization.k8s.io/nfs-client-provisioner-runner   2026-01-18T13:41:39Z

NAME                                                                      ROLE                                        AGE   USERS   GROUPS   SERVICEACCOUNTS
clusterrolebinding.rbac.authorization.k8s.io/run-nfs-client-provisioner   ClusterRole/nfs-client-provisioner-runner   13d                    default/nfs-client-provisioner

NAME                                                                   CREATED AT
role.rbac.authorization.k8s.io/leader-locking-nfs-client-provisioner   2026-01-18T13:41:39Z

NAME                                                                          ROLE                                         AGE   USERS   GROUPS   SERVICEACCOUNTS
rolebinding.rbac.authorization.k8s.io/leader-locking-nfs-client-provisioner   Role/leader-locking-nfs-client-provisioner   13d                    default/nfs-client-provisioner
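
These RBAC objects match the sample manifests shipped with the upstream kubernetes-sigs/nfs-subdir-external-provisioner project; the provisioner was presumably installed along these lines (a sketch; deployment.yaml must be edited to the image and NFS values visible in the pod description below):

wget https://raw.githubusercontent.com/kubernetes-sigs/nfs-subdir-external-provisioner/master/deploy/rbac.yaml
wget https://raw.githubusercontent.com/kubernetes-sigs/nfs-subdir-external-provisioner/master/deploy/deployment.yaml
# edit deployment.yaml: image      -> registry.cn-beijing.aliyuncs.com/pylixm/nfs-subdir-external-provisioner:v4.0.0
#                       NFS_SERVER -> 192.168.1.160
#                       NFS_PATH   -> /media/nfs
kubectl apply -f rbac.yaml
kubectl apply -f deployment.yaml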



root@k8s-master1:~# kubectl get deployment -o wide
NAME                              READY   UP-TO-DATE   AVAILABLE   AGE     CONTAINERS               IMAGES                                                                           SELECTOR
my-attu                           1/1     1            1           6d22h   attu                     swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/zilliz/attu:v2.6.4            app=attu
my-release-milvus-datanode        1/1     1            1           7d18h   datanode                 swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9        app.kubernetes.io/component=datanode,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus
my-release-milvus-mixcoord        1/1     1            1           7d18h   mixcoord                 swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9        app.kubernetes.io/component=mixcoord,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus
my-release-milvus-proxy           1/1     1            1           7d18h   proxy                    swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9        app.kubernetes.io/component=proxy,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus
my-release-milvus-querynode-0     1/1     1            1           7d18h   querynode                swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9        app.kubernetes.io/component=querynode,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus,milvus.io/querynode-group-id=0
my-release-milvus-querynode-1     0/0     0            0           7d18h   querynode                dummy                                                                            app.kubernetes.io/component=querynode,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus,milvus.io/querynode-group-id=1
my-release-milvus-standalone      0/0     0            0           7d18h   standalone               swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9        app.kubernetes.io/component=standalone,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus
my-release-milvus-streamingnode   1/1     1            1           7d18h   streamingnode            swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9        app.kubernetes.io/component=streamingnode,app.kubernetes.io/instance=my-release,app.kubernetes.io/managed-by=milvus-operator,app.kubernetes.io/name=milvus
nfs-client-provisioner            1/1     1            1           13d     nfs-client-provisioner   registry.cn-beijing.aliyuncs.com/pylixm/nfs-subdir-external-provisioner:v4.0.0   app=nfs-client-provisioner
root@k8s-master1:~# kubectl get pods -owide
NAME                                               READY   STATUS    RESTARTS   AGE     IP               NODE        NOMINATED NODE   READINESS GATES
my-attu-785b784687-jh8bh                           1/1     Running   1          6d22h   10.244.107.212   k8s-node3   <none>           <none>
my-release-etcd-0                                  1/1     Running   2          7d18h   10.244.36.86     k8s-node1   <none>           <none>
my-release-etcd-1                                  1/1     Running   2          7d18h   10.244.169.157   k8s-node2   <none>           <none>
my-release-etcd-2                                  1/1     Running   2          7d18h   10.244.107.213   k8s-node3   <none>           <none>
my-release-milvus-datanode-659c998467-6728j        1/1     Running   4          7d18h   10.244.107.216   k8s-node3   <none>           <none>
my-release-milvus-mixcoord-5f7765dd4f-nxbnn        1/1     Running   6          7d18h   10.244.169.156   k8s-node2   <none>           <none>
my-release-milvus-proxy-7564dd898f-4n8cl           1/1     Running   4          7d18h   10.244.107.215   k8s-node3   <none>           <none>
my-release-milvus-querynode-0-7684886cfb-qrpnd     1/1     Running   7          7d18h   10.244.169.151   k8s-node2   <none>           <none>
my-release-milvus-streamingnode-67cf4664df-qpzf8   1/1     Running   4          7d18h   10.244.107.211   k8s-node3   <none>           <none>
my-release-minio-0                                 1/1     Running   2          7d18h   10.244.107.217   k8s-node3   <none>           <none>
my-release-minio-1                                 1/1     Running   2          7d18h   10.244.169.153   k8s-node2   <none>           <none>
my-release-minio-2                                 1/1     Running   2          7d18h   10.244.36.87     k8s-node1   <none>           <none>
my-release-minio-3                                 1/1     Running   2          7d18h   10.244.36.88     k8s-node1   <none>           <none>
nfs-client-provisioner-784b8b5966-rh6g6            1/1     Running   1          6d23h   10.244.107.214   k8s-node3   <none>           <none>
root@k8s-master1:~# kubectl describe pods nfs-client-provisioner-784b8b5966-rh6g6
Name:             nfs-client-provisioner-784b8b5966-rh6g6
Namespace:        default
Priority:         0
Service Account:  nfs-client-provisioner
Node:             k8s-node3/192.168.1.156
Start Time:       Sun, 25 Jan 2026 16:07:05 +0800
Labels:           app=nfs-client-provisioner
                  pod-template-hash=784b8b5966
Annotations:      cni.projectcalico.org/containerID: 7885c566ce66cf3e18f17f3c42ba4df9e2dfbf4402e26f02d28de90ede904c1f
                  cni.projectcalico.org/podIP: 10.244.107.214/32
                  cni.projectcalico.org/podIPs: 10.244.107.214/32
Status:           Running
IP:               10.244.107.214
IPs:
  IP:           10.244.107.214
Controlled By:  ReplicaSet/nfs-client-provisioner-784b8b5966
Containers:
  nfs-client-provisioner:
    Container ID:   containerd://380120998576c3d67a05feee495de37b00eebd48c7e312cbfcac677ad1af44dd
    Image:          registry.cn-beijing.aliyuncs.com/pylixm/nfs-subdir-external-provisioner:v4.0.0
    Image ID:       registry.cn-beijing.aliyuncs.com/pylixm/nfs-subdir-external-provisioner@sha256:f93d46d5d58fb908701b09e7738f23bca806031cfe3e52b484e8a6c0147c8667
    Port:           <none>
    Host Port:      <none>
    State:          Running
      Started:      Sun, 01 Feb 2026 13:57:01 +0800
    Ready:          True
    Restart Count:  1
    Environment:
      PROVISIONER_NAME:  k8s-sigs.io/nfs-subdir-external-provisioner
      NFS_SERVER:        192.168.1.160
      NFS_PATH:          /media/nfs
    Mounts:
      /persistentvolumes from nfs-client-root (rw)
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-tjznt (ro)
Conditions:
  Type                        Status
  PodReadyToStartContainers   True
  Initialized                 True
  Ready                       True
  ContainersReady             True
  PodScheduled                True
Volumes:
  nfs-client-root:
    Type:      NFS (an NFS mount that lasts the lifetime of a pod)
    Server:    192.168.1.160
    Path:      /media/nfs
    ReadOnly:  false
  kube-api-access-tjznt:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    Optional:                false
    DownwardAPI:             true
QoS Class:                   BestEffort
Node-Selectors:              <none>
Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:                      <none>




root@k8s-master1:~# kubectl get sc -o wide
NAME                   PROVISIONER                                   RECLAIMPOLICY   VOLUMEBINDINGMODE   ALLOWVOLUMEEXPANSION   AGE
nfs-client (default)   k8s-sigs.io/nfs-subdir-external-provisioner   Delete          Immediate           false                  13d

root@k8s-master1:~# kubectl describe sc nfs-client
Name:            nfs-client
IsDefaultClass:  Yes
Annotations:     kubectl.kubernetes.io/last-applied-configuration={"apiVersion":"storage.k8s.io/v1","kind":"StorageClass","metadata":{"annotations":{"storageclass.kubernetes.io/is-default-class":"true"},"name":"nfs-client"},"parameters":{"archiveOnDelete":"false"},"provisioner":"k8s-sigs.io/nfs-subdir-external-provisioner"}
,storageclass.kubernetes.io/is-default-class=true
Provisioner:           k8s-sigs.io/nfs-subdir-external-provisioner
Parameters:            archiveOnDelete=false
AllowVolumeExpansion:  <unset>
MountOptions:          <none>
ReclaimPolicy:         Delete
VolumeBindingMode:     Immediate
Events:                <none>
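
The StorageClass itself is fully recoverable from the last-applied-configuration annotation above; as a manifest it is equivalent to:

cat <<'EOF' | kubectl apply -f -
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
  name: nfs-client
  annotations:
    storageclass.kubernetes.io/is-default-class: "true"
provisioner: k8s-sigs.io/nfs-subdir-external-provisioner
parameters:
  archiveOnDelete: "false"
EOF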




root@k8s-master1:~# kubectl get pvc -o wide
NAME                        STATUS   VOLUME                                     CAPACITY   ACCESS MODES   STORAGECLASS   VOLUMEATTRIBUTESCLASS   AGE     VOLUMEMODE
data-my-release-etcd-0      Bound    pvc-e22946b5-db39-4367-95c5-8b2fb00bf5d4   10Gi       RWO            nfs-client     <unset>                 7d18h   Filesystem
data-my-release-etcd-1      Bound    pvc-2e1af3d1-0b67-4d66-9c99-3e5f811c183c   10Gi       RWO            nfs-client     <unset>                 7d18h   Filesystem
data-my-release-etcd-2      Bound    pvc-02bfdd79-9a30-43b3-9275-e524f5270b19   10Gi       RWO            nfs-client     <unset>                 7d18h   Filesystem
export-my-release-minio-0   Bound    pvc-44807339-18c0-4820-bf47-56fb708a68ac   500Gi      RWO            nfs-client     <unset>                 7d18h   Filesystem
export-my-release-minio-1   Bound    pvc-355a169d-7646-419b-a0bc-eb263fc99da7   500Gi      RWO            nfs-client     <unset>                 7d18h   Filesystem
export-my-release-minio-2   Bound    pvc-4b708049-7188-4513-ac5b-201f5b621834   500Gi      RWO            nfs-client     <unset>                 7d18h   Filesystem
export-my-release-minio-3   Bound    pvc-d951bd54-fdc5-4045-877e-f91d1de24dc1   500Gi      RWO            nfs-client     <unset>                 7d18h   Filesystem
root@k8s-master1:~# kubectl get pv -o wide
NAME                                       CAPACITY   ACCESS MODES   RECLAIM POLICY   STATUS   CLAIM                               STORAGECLASS   VOLUMEATTRIBUTESCLASS   REASON   AGE     VOLUMEMODE
pvc-02bfdd79-9a30-43b3-9275-e524f5270b19   10Gi       RWO            Delete           Bound    default/data-my-release-etcd-2      nfs-client     <unset>                          7d18h   Filesystem
pvc-2e1af3d1-0b67-4d66-9c99-3e5f811c183c   10Gi       RWO            Delete           Bound    default/data-my-release-etcd-1      nfs-client     <unset>                          7d18h   Filesystem
pvc-355a169d-7646-419b-a0bc-eb263fc99da7   500Gi      RWO            Delete           Bound    default/export-my-release-minio-1   nfs-client     <unset>                          7d18h   Filesystem
pvc-44807339-18c0-4820-bf47-56fb708a68ac   500Gi      RWO            Delete           Bound    default/export-my-release-minio-0   nfs-client     <unset>                          7d18h   Filesystem
pvc-4b708049-7188-4513-ac5b-201f5b621834   500Gi      RWO            Delete           Bound    default/export-my-release-minio-2   nfs-client     <unset>                          7d18h   Filesystem
pvc-d951bd54-fdc5-4045-877e-f91d1de24dc1   500Gi      RWO            Delete           Bound    default/export-my-release-minio-3   nfs-client     <unset>                          7d18h   Filesystem
pvc-e22946b5-db39-4367-95c5-8b2fb00bf5d4   10Gi       RWO            Delete           Bound    default/data-my-release-etcd-0      nfs-client     <unset>                          7d18h   Filesystem

Prepare the Milvus deployment YAML and images

# Manually pull the images from the mirror and retag them so that manifests
# referencing the docker.io names find them locally (run on each node that may
# schedule these pods). Note: kubelet reads containerd's k8s.io namespace, so
# ctr needs -n k8s.io (crictl pulls into k8s.io by default).
ctr -n k8s.io image pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
ctr -n k8s.io image tag swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9 docker.io/milvusdb/milvus:v2.6.9
ctr -n k8s.io image pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus-operator:v1.3.5
ctr -n k8s.io image tag swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus-operator:v1.3.5 docker.io/milvusdb/milvus-operator:v1.3.5
crictl pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/etcd:3.5.25-r1
ctr -n k8s.io images tag swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/etcd:3.5.25-r1 docker.io/milvusdb/etcd:3.5.25-r1
crictl pull swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/minio/minio:RELEASE.2024-12-18T13-15-44Z
ctr -n k8s.io images tag swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/minio/minio:RELEASE.2024-12-18T13-15-44Z docker.io/minio/minio:RELEASE.2024-12-18T13-15-44Z
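
# Install milvus-operator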
kubectl apply -f https://raw.githubusercontent.com/zilliztech/milvus-operator/main/deploy/manifests/deployment.yaml

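# Create the Milvus cluster (cluster mode, woodpecker message stream)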
kubectl apply -f https://raw.githubusercontent.com/zilliztech/milvus-operator/main/config/samples/milvus_cluster_woodpecker.yaml

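Note that the CR actually applied was evidently not the unmodified sample: the last-applied-configuration annotation in the output below shows the image field pointing at the mirror. Reconstructed from that annotation, the applied manifest is equivalent to:

cat <<'EOF' | kubectl apply -f -
apiVersion: milvus.io/v1beta1
kind: Milvus
metadata:
  name: my-release
  namespace: default
  labels:
    app: milvus
spec:
  mode: cluster
  components:
    image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
  config: {}
  dependencies:
    msgStreamType: woodpecker
EOF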

root@k8s-master1:~# kubectl get pods -o wide
NAME                                               READY   STATUS    RESTARTS   AGE     IP               NODE        NOMINATED NODE   READINESS GATES
my-attu-785b784687-jh8bh                           1/1     Running   1          6d22h   10.244.107.212   k8s-node3   <none>           <none>
my-release-etcd-0                                  1/1     Running   2          7d18h   10.244.36.86     k8s-node1   <none>           <none>
my-release-etcd-1                                  1/1     Running   2          7d18h   10.244.169.157   k8s-node2   <none>           <none>
my-release-etcd-2                                  1/1     Running   2          7d18h   10.244.107.213   k8s-node3   <none>           <none>
my-release-milvus-datanode-659c998467-6728j        1/1     Running   4          7d18h   10.244.107.216   k8s-node3   <none>           <none>
my-release-milvus-mixcoord-5f7765dd4f-nxbnn        1/1     Running   6          7d18h   10.244.169.156   k8s-node2   <none>           <none>
my-release-milvus-proxy-7564dd898f-4n8cl           1/1     Running   4          7d18h   10.244.107.215   k8s-node3   <none>           <none>
my-release-milvus-querynode-0-7684886cfb-qrpnd     1/1     Running   7          7d18h   10.244.169.151   k8s-node2   <none>           <none>
my-release-milvus-streamingnode-67cf4664df-qpzf8   1/1     Running   4          7d18h   10.244.107.211   k8s-node3   <none>           <none>
my-release-minio-0                                 1/1     Running   2          7d18h   10.244.107.217   k8s-node3   <none>           <none>
my-release-minio-1                                 1/1     Running   2          7d18h   10.244.169.153   k8s-node2   <none>           <none>
my-release-minio-2                                 1/1     Running   2          7d18h   10.244.36.87     k8s-node1   <none>           <none>
my-release-minio-3                                 1/1     Running   2          7d18h   10.244.36.88     k8s-node1   <none>           <none>
nfs-client-provisioner-784b8b5966-rh6g6            1/1     Running   1          6d23h   10.244.107.214   k8s-node3   <none>           <none>
root@k8s-master1:~# kubectl get milvus my-release -o yaml
apiVersion: milvus.io/v1beta1
kind: Milvus
metadata:
  annotations:
    kubectl.kubernetes.io/last-applied-configuration: |
      {"apiVersion":"milvus.io/v1beta1","kind":"Milvus","metadata":{"annotations":{},"labels":{"app":"milvus"},"name":"my-release","namespace":"default"},"spec":{"components":{"image":"swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9"},"config":{},"dependencies":{"msgStreamType":"woodpecker"},"mode":"cluster"}}
    milvus.io/dependency-values-merged: "true"
    milvus.io/pod-service-label-added: "true"
    milvus.io/querynode-current-group-id: "0"
  creationTimestamp: "2026-01-24T13:33:06Z"
  finalizers:
  - milvus.milvus.io/finalizer
  generation: 3
  labels:
    app: milvus
    milvus.io/operator-version: 1.3.5
  name: my-release
  namespace: default
  resourceVersion: "173777"
  uid: baff8004-1917-432d-bc07-10b67b29f15a
spec:
  components:
    dataNode:
      paused: false
      probes: null
      replicas: 1
      securityContext: null
    disableMetric: false
    enableRollingUpdate: true
    image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
    imageUpdateMode: rollingUpgrade
    metricInterval: ""
    mixCoord:
      paused: false
      probes: null
      replicas: 1
      securityContext: null
    paused: false
    probes: null
    proxy:
      paused: false
      probes: null
      replicas: 1
      securityContext: null
      serviceType: ClusterIP
    queryNode:
      paused: false
      probes: null
      replicas: 1
      securityContext: null
    rollingMode: 2
    securityContext: null
    standalone:
      paused: false
      probes: null
      replicas: 0
      securityContext: null
      serviceType: ClusterIP
    streamingMode: true
    streamingNode:
      paused: false
      probes: null
      replicas: 1
      securityContext: null
    targetPortType: string
    updateConfigMapOnly: true
  config:
    dataCoord:
      enableActiveStandby: true
    indexCoord:
      enableActiveStandby: true
    queryCoord:
      enableActiveStandby: true
    rootCoord:
      enableActiveStandby: true
  dependencies:
    customMsgStream: null
    etcd:
      endpoints:
      - my-release-etcd-0.my-release-etcd-headless.default:2379
      - my-release-etcd-1.my-release-etcd-headless.default:2379
      - my-release-etcd-2.my-release-etcd-headless.default:2379
      external: false
      inCluster:
        deletionPolicy: Retain
        values:
          auth:
            rbac:
              create: false
              enabled: false
            token:
              enabled: false
          autoCompactionMode: revision
          autoCompactionRetention: "1000"
          enabled: true
          extraEnvVars:
          - name: ETCD_QUOTA_BACKEND_BYTES
            value: "4294967296"
          - name: ETCD_HEARTBEAT_INTERVAL
            value: "500"
          - name: ETCD_ELECTION_TIMEOUT
            value: "2500"
          image:
            pullPolicy: IfNotPresent
            repository: milvusdb/etcd
            tag: 3.5.25-r1
          livenessProbe:
            enabled: true
            timeoutSeconds: 10
          name: etcd
          pdb:
            create: false
          persistence:
            accessMode: ReadWriteOnce
            enabled: true
            size: 10Gi
            storageClass: null
          readinessProbe:
            enabled: true
            periodSeconds: 20
            timeoutSeconds: 10
          replicaCount: 3
          service:
            peerPort: 2380
            port: 2379
            type: ClusterIP
    kafka:
      external: false
    msgStreamType: woodpecker
    natsmq:
      persistence:
        persistentVolumeClaim:
          spec: null
    pulsar:
      endpoint: ""
      external: false
    rocksmq:
      persistence:
        persistentVolumeClaim:
          spec: null
    storage:
      endpoint: my-release-minio.default:9000
      external: false
      inCluster:
        deletionPolicy: Retain
        values:
          accessKey: minioadmin
          bucketName: milvus-bucket
          enabled: true
          existingSecret: ""
          iamEndpoint: ""
          image:
            pullPolicy: IfNotPresent
            tag: RELEASE.2024-12-18T13-15-44Z
          livenessProbe:
            enabled: true
            failureThreshold: 5
            initialDelaySeconds: 5
            periodSeconds: 5
            successThreshold: 1
            timeoutSeconds: 5
          mode: distributed
          name: minio
          persistence:
            accessMode: ReadWriteOnce
            enabled: true
            existingClaim: ""
            size: 500Gi
            storageClass: null
          podDisruptionBudget:
            enabled: false
          readinessProbe:
            enabled: true
            failureThreshold: 5
            initialDelaySeconds: 5
            periodSeconds: 5
            successThreshold: 1
            timeoutSeconds: 1
          region: ""
          resources:
            requests:
              memory: 2Gi
          rootPath: file
          secretKey: minioadmin
          service:
            port: 9000
            type: ClusterIP
          startupProbe:
            enabled: true
            failureThreshold: 60
            initialDelaySeconds: 0
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 5
          useIAM: false
          useVirtualHost: false
      secretRef: my-release-minio
      type: MinIO
    tei:
      enabled: false
    woodpecker:
      persistence:
        persistentVolumeClaim:
          spec: null
  hookConfig: null
  mode: cluster
status:
  componentsDeployStatus:
    datanode:
      generation: 1
      image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
      status:
        availableReplicas: 1
        conditions:
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:35:25Z"
          message: ReplicaSet "my-release-milvus-datanode-659c998467" has successfully
            progressed.
          reason: NewReplicaSetAvailable
          status: "True"
          type: Progressing
        - lastTransitionTime: "2026-02-01T05:58:01Z"
          lastUpdateTime: "2026-02-01T05:58:01Z"
          message: Deployment has minimum availability.
          reason: MinimumReplicasAvailable
          status: "True"
          type: Available
        observedGeneration: 1
        readyReplicas: 1
        replicas: 1
        updatedReplicas: 1
    mixcoord:
      generation: 1
      image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
      status:
        availableReplicas: 1
        conditions:
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:35:24Z"
          message: ReplicaSet "my-release-milvus-mixcoord-5f7765dd4f" has successfully
            progressed.
          reason: NewReplicaSetAvailable
          status: "True"
          type: Progressing
        - lastTransitionTime: "2026-02-01T05:57:54Z"
          lastUpdateTime: "2026-02-01T05:57:54Z"
          message: Deployment has minimum availability.
          reason: MinimumReplicasAvailable
          status: "True"
          type: Available
        observedGeneration: 1
        readyReplicas: 1
        replicas: 1
        updatedReplicas: 1
    proxy:
      generation: 1
      image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
      status:
        availableReplicas: 1
        conditions:
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:35:25Z"
          message: ReplicaSet "my-release-milvus-proxy-7564dd898f" has successfully
            progressed.
          reason: NewReplicaSetAvailable
          status: "True"
          type: Progressing
        - lastTransitionTime: "2026-02-01T05:58:22Z"
          lastUpdateTime: "2026-02-01T05:58:22Z"
          message: Deployment has minimum availability.
          reason: MinimumReplicasAvailable
          status: "True"
          type: Available
        observedGeneration: 1
        readyReplicas: 1
        replicas: 1
        updatedReplicas: 1
    querynode:
      generation: 2
      image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
      status:
        availableReplicas: 1
        conditions:
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:33:24Z"
          message: ReplicaSet "my-release-milvus-querynode-0-7684886cfb" has successfully
            progressed.
          reason: NewReplicaSetAvailable
          status: "True"
          type: Progressing
        - lastTransitionTime: "2026-02-01T05:58:06Z"
          lastUpdateTime: "2026-02-01T05:58:06Z"
          message: Deployment has minimum availability.
          reason: MinimumReplicasAvailable
          status: "True"
          type: Available
        observedGeneration: 2
        readyReplicas: 1
        replicas: 1
        updatedReplicas: 1
    standalone:
      generation: 2
      image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
      status:
        conditions:
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:33:23Z"
          message: Deployment has minimum availability.
          reason: MinimumReplicasAvailable
          status: "True"
          type: Available
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:33:24Z"
          message: ReplicaSet "my-release-milvus-standalone-5c8db65c88" has successfully
            progressed.
          reason: NewReplicaSetAvailable
          status: "True"
          type: Progressing
        observedGeneration: 2
    streamingnode:
      generation: 1
      image: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
      status:
        availableReplicas: 1
        conditions:
        - lastTransitionTime: "2026-01-24T13:33:23Z"
          lastUpdateTime: "2026-01-24T13:35:40Z"
          message: ReplicaSet "my-release-milvus-streamingnode-67cf4664df" has successfully
            progressed.
          reason: NewReplicaSetAvailable
          status: "True"
          type: Progressing
        - lastTransitionTime: "2026-02-01T05:58:20Z"
          lastUpdateTime: "2026-02-01T05:58:20Z"
          message: Deployment has minimum availability.
          reason: MinimumReplicasAvailable
          status: "True"
          type: Available
        observedGeneration: 1
        readyReplicas: 1
        replicas: 1
        updatedReplicas: 1
  conditions:
  - lastTransitionTime: "2026-02-01T05:58:24Z"
    message: All Milvus components are healthy
    reason: ReasonMilvusHealthy
    status: "True"
    type: MilvusReady
  - lastTransitionTime: "2026-01-24T13:35:53Z"
    message: Milvus components are all updated
    reason: MilvusComponentsUpdated
    status: "True"
    type: MilvusUpdated
  - lastTransitionTime: "2026-01-25T08:02:05Z"
    message: Etcd endpoints is healthy
    reason: EtcdReady
    status: "True"
    type: EtcdReady
  - lastTransitionTime: "2026-01-25T08:02:05Z"
    reason: StorageReady
    status: "True"
    type: StorageReady
  - lastTransitionTime: "2026-01-24T13:33:23Z"
    reason: MsgStreamReady
    status: "True"
    type: MsgStreamReady
  currentImage: swr.cn-north-4.myhuaweicloud.com/ddn-k8s/docker.io/milvusdb/milvus:v2.6.9
  endpoint: my-release-milvus.default:19530
  ingress:
    loadBalancer: {}
  observedGeneration: 3
  rollingModeVersion: 2
  status: Healthy

# Temporary port-forward; press Ctrl+C to stop it
kubectl port-forward service/my-release-milvus 27017:19530
Forwarding from 127.0.0.1:27017 -> 19530

# Expose via NodePort; the service then becomes reachable on the assigned node port of any node's IP
kubectl expose deployment my-release-milvus-proxy --type=NodePort --port=9091,19530

# Check the assigned NodePorts
root@k8s-master1:~# kubectl get svc
NAME                       TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)                          AGE
kubernetes                 ClusterIP   10.10.0.1       <none>        443/TCP                          14d
my-attu                    NodePort    10.10.141.249   <none>        3000:32402/TCP                   6d23h
my-attu-svc                ClusterIP   10.10.48.87     <none>        3000/TCP                         6d23h
my-release-etcd            ClusterIP   10.10.22.97     <none>        2379/TCP,2380/TCP                7d19h
my-release-etcd-headless   ClusterIP   None            <none>        2379/TCP,2380/TCP                7d19h
my-release-milvus          ClusterIP   10.10.187.170   <none>        19530/TCP,9091/TCP               7d18h
my-release-milvus-proxy    NodePort    10.10.77.132    <none>        9091:31818/TCP,19530:30900/TCP   7d17h
my-release-minio           ClusterIP   10.10.222.172   <none>        9000/TCP                         7d19h
my-release-minio-svc       ClusterIP   None            <none>        9000/TCP                         7d19h
root@k8s-master1:~# kubectl get deployment
NAME                              READY   UP-TO-DATE   AVAILABLE   AGE
my-attu                           1/1     1            1           6d23h
my-release-milvus-datanode        1/1     1            1           7d18h
my-release-milvus-mixcoord        1/1     1            1           7d18h
my-release-milvus-proxy           1/1     1            1           7d18h
my-release-milvus-querynode-0     1/1     1            1           7d18h
my-release-milvus-querynode-1     0/0     0            0           7d18h
my-release-milvus-standalone      0/0     0            0           7d18h
my-release-milvus-streamingnode   1/1     1            1           7d18h
nfs-client-provisioner            1/1     1            1           13d

# Open the Milvus Web UI in a browser
http://192.168.1.151:31818/webui/
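
A quick reachability check from outside the cluster, using the NodePorts above (31818 maps to the 9091 metrics/Web UI port, where Milvus also serves its health probe):

curl http://192.168.1.151:31818/healthz    # should return: OK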

Deploy the Attu GUI

# Deploy the GUI
kubectl apply -f https://raw.githubusercontent.com/zilliztech/attu/main/attu-k8s-deploy.yaml
# Expose via NodePort
kubectl expose deployment my-attu --type=NodePort --port=3000
# Access URL (on Attu's connect screen, the in-cluster Milvus address is my-release-milvus.default:19530, the endpoint reported in the Milvus CR status above)
http://192.168.1.156:32402/

Testing

Functional testing follows the official quickstart: https://milvus.io/docs/zh/quickstart.md
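
The quickstart itself uses pymilvus; the same flow can be sketched over the RESTful v2 API instead (a sketch, with endpoint paths taken from the Milvus REST reference; 30900 is the NodePort for 19530 from the service listing above):

# create a 4-dimensional collection (quick setup: "id" primary key + "vector" field)
curl -s http://192.168.1.151:30900/v2/vectordb/collections/create \
  -H 'Content-Type: application/json' \
  -d '{"collectionName": "demo", "dimension": 4}'

# insert two entities
curl -s http://192.168.1.151:30900/v2/vectordb/entities/insert \
  -H 'Content-Type: application/json' \
  -d '{"collectionName": "demo", "data": [{"id": 1, "vector": [0.1, 0.2, 0.3, 0.4]}, {"id": 2, "vector": [0.4, 0.3, 0.2, 0.1]}]}'

# nearest-neighbor search; each call answers with JSON, check the code field for errors
curl -s http://192.168.1.151:30900/v2/vectordb/entities/search \
  -H 'Content-Type: application/json' \
  -d '{"collectionName": "demo", "data": [[0.1, 0.2, 0.3, 0.4]], "annsField": "vector", "limit": 1}'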