Set cannot be destroyed: it is in use by a kernel component

Open · kallisti5 opened this issue 4 years ago • 6 comments

What happened?

Getting "ipset v7.2: Set cannot be destroyed: it is in use by a kernel component"

[root@k10dal1terarocketio ~]# kubectl get all --all-namespaces
NAMESPACE     NAME                                              READY   STATUS              RESTARTS   AGE
kube-system   pod/coredns-f9fd979d6-mdnst                       0/1     ContainerCreating   0          117s
kube-system   pod/coredns-f9fd979d6-xxzvl                       0/1     ContainerCreating   0          117s
kube-system   pod/etcd-k10dal1terarocketio                      1/1     Running             0          2m8s
kube-system   pod/kube-apiserver-k10dal1terarocketio            1/1     Running             0          2m8s
kube-system   pod/kube-controller-manager-k10dal1terarocketio   1/1     Running             0          2m8s
kube-system   pod/kube-proxy-m74fl                              1/1     Running             0          117s
kube-system   pod/kube-scheduler-k10dal1terarocketio            1/1     Running             0          2m8s
kube-system   pod/weave-net-sfwg2                               1/2     CrashLoopBackOff    4          117s

NAMESPACE     NAME                 TYPE        CLUSTER-IP   EXTERNAL-IP   PORT(S)                  AGE
default       service/kubernetes   ClusterIP   10.96.0.1    <none>        443/TCP                  2m15s
kube-system   service/kube-dns     ClusterIP   10.96.0.10   <none>        53/UDP,53/TCP,9153/TCP   2m14s

NAMESPACE     NAME                        DESIRED   CURRENT   READY   UP-TO-DATE   AVAILABLE   NODE SELECTOR            AGE
kube-system   daemonset.apps/kube-proxy   1         1         1       1            1           kubernetes.io/os=linux   2m14s
kube-system   daemonset.apps/weave-net    1         1         0       1            0           <none>                   2m8s

NAMESPACE     NAME                      READY   UP-TO-DATE   AVAILABLE   AGE
kube-system   deployment.apps/coredns   0/2     2            0           2m14s

NAMESPACE     NAME                                DESIRED   CURRENT   READY   AGE
kube-system   replicaset.apps/coredns-f9fd979d6   2         2         0       117s

How to reproduce it?

  • CentOS 8
  • cri-o
  • Kubernetes 1.19.3
  • Deploy weave: kubectl apply -f "https://cloud.weave.works/k8s/net?k8s-version=$(kubectl version | base64 | tr -d '\n')&env.IPTABLES_BACKEND=nft"

Anything else we need to know?

Vultr VM, CentOS 8, 4 GiB of RAM.
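Since the manifest above is applied with env.IPTABLES_BACKEND=nft, it may also be worth confirming which backend the host's iptables binary actually uses; on CentOS 8 this is normally the nft-based one. The output below is illustrative, not taken from this host:

[root@server ~]# iptables -V
iptables v1.8.4 (nf_tables)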

Versions:

[root@server ~]# uname -a
Linux server 4.18.0-193.19.1.el8_2.x86_64 #1 SMP Mon Sep 14 14:37:00 UTC 2020 x86_64 x86_64 x86_64 GNU/Linux
[root@server ~]# kubectl version
Client Version: version.Info{Major:"1", Minor:"19", GitVersion:"v1.19.3", GitCommit:"1e11e4a2108024935ecfcb2912226cedeafd99df", GitTreeState:"clean", BuildDate:"2020-10-14T12:50:19Z", GoVersion:"go1.15.2", Compiler:"gc", Platform:"linux/amd64"}
Server Version: version.Info{Major:"1", Minor:"19", GitVersion:"v1.19.3", GitCommit:"1e11e4a2108024935ecfcb2912226cedeafd99df", GitTreeState:"clean", BuildDate:"2020-10-14T12:41:49Z", GoVersion:"go1.15.2", Compiler:"gc", Platform:"linux/amd64"}

Logs:

[root@server ~]# kubectl -n kube-system logs pod/weave-net-sfwg2 weave
ipset v7.2: Set cannot be destroyed: it is in use by a kernel component
[root@server ~]# kubectl -n kube-system logs pod/weave-net-sfwg2 weave-npc
INFO: 2020/10/20 14:16:48.880872 Starting Weaveworks NPC 2.7.0; node name "k10dal1terarocketio"
INFO: 2020/10/20 14:16:48.881322 Serving /metrics on :6781
Tue Oct 20 14:16:48 2020 <5> ulogd.c:408 registering plugin `NFLOG'
Tue Oct 20 14:16:48 2020 <5> ulogd.c:408 registering plugin `BASE'
Tue Oct 20 14:16:48 2020 <5> ulogd.c:408 registering plugin `PCAP'
Tue Oct 20 14:16:48 2020 <5> ulogd.c:981 building new pluginstance stack: 'log1:NFLOG,base1:BASE,pcap1:PCAP'
WARNING: scheduler configuration failed: Function not implemented
DEBU: 2020/10/20 14:16:48.904826 Got list of ipsets: [weave-kube-test]
DEBU: 2020/10/20 14:16:48.904849 Flushing ipset 'weave-kube-test'
DEBU: 2020/10/20 14:16:48.905421 Destroying ipset 'weave-kube-test'
INFO: 2020/10/20 14:16:48.965197 EVENT AddNamespace {"metadata":{"creationTimestamp":"2020-10-20T14:16:20Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:phase":{}}},"manager":"kube-apiserver","operation":"Update","time":"2020-10-20T14:16:20Z"}],"name":"default","resourceVersion":"162","selfLink":"/api/v1/namespaces/default","uid":"b22b510a-72de-49fd-b8c0-a2ee10bea40b"},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}
INFO: 2020/10/20 14:16:48.976327 creating ipset: &npc.selectorSpec{key:"", podSelector:labels.internalSelector{}, namespaceSelector:labels.Selector(nil), policyTypes:[]npc.policyType(nil), ipsetType:"hash:ip", ipsetName:"weave-k?Z;25^M}|1s7P3|H9i;*;MhG", nsName:"default"}
DEBU: 2020/10/20 14:16:48.977034 adding rule for DefaultAllow in namespace: default, chain: WEAVE-NPC-DEFAULT, [-m set --match-set weave-;rGqyMIl1HN^cfDki~Z$3]6!N dst -j ACCEPT -m comment --comment DefaultAllow ingress isolation for namespace: default]
DEBU: 2020/10/20 14:16:48.978996 adding rule for DefaultAllow in namespace: default, chain: WEAVE-NPC-EGRESS-DEFAULT, [-m set --match-set weave-s_+ChJId4Uy_$}G;WdH|~TK)I src -j WEAVE-NPC-EGRESS-ACCEPT -m comment --comment DefaultAllow egress isolation for namespace: default]
DEBU: 2020/10/20 14:16:48.980585 adding rule for DefaultAllow in namespace: default, chain: WEAVE-NPC-EGRESS-DEFAULT, [-m set --match-set weave-s_+ChJId4Uy_$}G;WdH|~TK)I src -j RETURN -m comment --comment DefaultAllow egress isolation for namespace: default]
INFO: 2020/10/20 14:16:48.982342 EVENT AddNamespace {"metadata":{"creationTimestamp":"2020-10-20T14:16:19Z","managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:status":{"f:phase":{}}},"manager":"kube-apiserver","operation":"Update","time":"2020-10-20T14:16:19Z"}],"name":"kube-system","resourceVersion":"10","selfLink":"/api/v1/namespaces/kube-system","uid":"1d3ff025-501b-4395-a50f-6b25a7650da3"},"spec":{"finalizers":["kubernetes"]},"status":{"phase":"Active"}}
INFO: 2020/10/20 14:16:48.985383 creating ipset: &npc.selectorSpec{key:"", podSelector:labels.internalSelector{}, namespaceSelector:labels.Selector(nil), policyTypes:[]npc.policyType(nil), ipsetType:"hash:ip", ipsetName:"weave-iuZcey(5DeXbzgRFs8Szo]+@p", nsName:"kube-system"}
DEBU: 2020/10/20 14:16:48.985956 adding rule for DefaultAllow in namespace: kube-system, chain: WEAVE-NPC-DEFAULT, [-m set --match-set weave-P.B|!ZhkAr5q=XZ?3}tMBA+0 dst -j ACCEPT -m comment --comment DefaultAllow ingress isolation for namespace: kube-system]
DEBU: 2020/10/20 14:16:48.987540 adding rule for DefaultAllow in namespace: kube-system, chain: WEAVE-NPC-EGRESS-DEFAULT, [-m set --match-set weave-E1ney4o[ojNrLk.6rOHi;7MPE src -j WEAVE-NPC-EGRESS-ACCEPT -m comment --comment DefaultAllow egress isolation for namespace: kube-system]
DEBU: 2020/10/20 14:16:48.989199 adding rule for DefaultAllow in namespace: kube-system, chain: WEAVE-NPC-EGRESS-DEFAULT, [-m set --match-set weave-E1ney4o[ojNrLk.6rOHi;7MPE src -j RETURN -m comment --comment DefaultAllow egress isolation for namespace: kube-system]
DEBU: 2020/10/20 14:16:48.992608 EVENT AddPod

Network:

# ipset list
Name: weave-;rGqyMIl1HN^cfDki~Z$3]6!N
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 1
Number of entries: 0
Members:

Name: weave-s_+ChJId4Uy_$}G;WdH|~TK)I
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 2
Number of entries: 0
Members:

Name: weave-k?Z;25^M}|1s7P3|H9i;*;MhG
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 0
Number of entries: 0
Members:

Name: weave-P.B|!ZhkAr5q=XZ?3}tMBA+0
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 1
Number of entries: 0
Members:

Name: weave-E1ney4o[ojNrLk.6rOHi;7MPE
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 2
Number of entries: 0
Members:

Name: weave-iuZcey(5DeXbzgRFs8Szo]+@p
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 0
Number of entries: 0
Members:

Name: weave-Rzff}h:=]JaaJl/G;(XJpGjZ[
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 1
Number of entries: 0
Members:

Name: weave-41s)5vQ^o/xWGz6a20N:~?#|E
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 2
Number of entries: 0
Members:

Name: weave-4vtqMI+kx/2]jD%_c0S%thO%V
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 0
Number of entries: 0
Members:

Name: weave-]B*(W?)t*z5O17G044[gUo#$l
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 1
Number of entries: 0
Members:

Name: weave-sui%__gZ}{kX~oZgI_Ttqp=Dp
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 2
Number of entries: 0
Members:

Name: weave-mF}1zpEo4W6iYroE^=:V3{S6W
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536 comment
Size in memory: 120
References: 0
Number of entries: 0
Members:

Name: weave-kube-test
Type: hash:ip
Revision: 4
Header: family inet hashsize 1024 maxelem 65536
Size in memory: 120
References: 0
Number of entries: 0
Members:

kallisti5 commented on Oct 20, 2020

This happens every time weave is deployed.

kallisti5 commented on Oct 20, 2020

If I run ipset destroy weave-kube-test, the set just gets recreated by the weave pod every time it attempts to start.

kallisti5 commented on Oct 20, 2020

Seeing folks hacking in a sleep 1 before the ipset destroy call: https://github.com/replicatedhq/kURL/pull/717/commits/877767c3abf7f498574f9b2e4199e208683a4dca

It's kind of messy, but it may be something that needs to be added until the race condition can be figured out.
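For anyone who can't pull that patch in directly, a minimal sketch of the same idea is to retry the destroy with a short pause so the kernel has time to release the set (the set name comes from the logs above; the retry count is arbitrary):

# Retry destroying the leftover test set, pausing between attempts,
# mirroring the sleep-before-destroy hack in the kURL commit linked above.
for attempt in 1 2 3 4 5; do
    ipset destroy weave-kube-test && break
    sleep 1
done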

kallisti5 commented on Oct 20, 2020

The solution linked above immediately fixed the issue for me.

kallisti5 commented on Oct 20, 2020

It seems to have worked for me too, thank you @kallisti5. However, on CentOS 8 the issue can be reproduced more easily.

fuzhibo commented on Jan 15, 2021

This also worked for me when using Kubespray to deploy a Kubernetes cluster with Weave. After modifying roles/network_plugin/weave/templates/weave-net.yml.j2 according to the patch provided above, everything worked like a charm. Thanks!

Also using CentOS 8.
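For anyone following the same route, the workflow is roughly: edit that template in your Kubespray checkout and re-run the playbook so the updated weave-net DaemonSet gets applied. The inventory path below is illustrative and assumes Kubespray's sample layout:

# Apply the modified Weave template by re-running the Kubespray playbook.
vi roles/network_plugin/weave/templates/weave-net.yml.j2
ansible-playbook -i inventory/mycluster/hosts.yaml --become cluster.yml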

matulek commented on Mar 2, 2021