OpenShift version: v3.11.0+d0c29df-98
Kubernetes version: v1.11.0+d4cacc0
Istio version: 1.4.0
Envoy log
2019-12-12T00:16:14.116554Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:16.116541Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:18.116424Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:20.116711Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:22.116448Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:24.116455Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:26.117034Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:28.140398Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:30.119316Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:32.117052Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:34.118077Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
2019-12-12T00:16:36.116392Z info Envoy proxy is NOT ready: config not received from Pilot (is Pilot running?): cds updates: 1 successful, 0 rejected; lds updates: 0 successful, 0 rejected
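The pattern above (cds updates: 1 successful, lds updates: 0) suggests the sidecar did reach Pilot once for clusters but never received listeners, which usually points at the ADS stream stalling rather than Pilot being down. Not sure it applies here, but two generic checks (the pod and namespace are taken from the Pilot log further down; curl being present in the proxy image is an assumption):

istioctl proxy-status
# healthy sidecars show SYNCED for CDS/LDS/EDS/RDS; STALE or NOT SENT marks where the push stopped

kubectl -n spaceiwknmd3f exec library-front-istio-885547d66-4vmg8 -c istio-proxy -- \
  curl -s localhost:15000/clusters | grep xds-grpc
# the xds-grpc entries in Envoy's admin output show the live connection state toward Pilot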
istio-init container log
Environment:
------------
ENVOY_PORT=
INBOUND_CAPTURE_PORT=
ISTIO_INBOUND_INTERCEPTION_MODE=
ISTIO_INBOUND_TPROXY_MARK=
+ iptables -t nat -N ISTIO_REDIRECT
ISTIO_INBOUND_TPROXY_ROUTE_TABLE=
ISTIO_INBOUND_PORTS=
ISTIO_LOCAL_EXCLUDE_PORTS=
ISTIO_SERVICE_CIDR=
ISTIO_SERVICE_EXCLUDE_CIDR=
Variables:
----------
PROXY_PORT=15001
PROXY_INBOUND_CAPTURE_PORT=15006
PROXY_UID=1337
INBOUND_INTERCEPTION_MODE=REDIRECT
INBOUND_TPROXY_MARK=1337
INBOUND_TPROXY_ROUTE_TABLE=133
INBOUND_PORTS_INCLUDE=*
INBOUND_PORTS_EXCLUDE=15020
OUTBOUND_IP_RANGES_INCLUDE=*
OUTBOUND_IP_RANGES_EXCLUDE=
OUTBOUND_PORTS_EXCLUDE=
KUBEVIRT_INTERFACES=
ENABLE_INBOUND_IPV6=
+ iptables -t nat -A ISTIO_REDIRECT -p tcp -j REDIRECT --to-port 15001
+ iptables -t nat -N ISTIO_IN_REDIRECT
+ '[' '*' == '*' ']'
+ iptables -t nat -A ISTIO_IN_REDIRECT -p tcp -j REDIRECT --to-port 15006
+ '[' -n '*' ']'
+ '[' REDIRECT = TPROXY ']'
+ table=nat
+ iptables -t nat -N ISTIO_INBOUND
+ iptables -t nat -A PREROUTING -p tcp -j ISTIO_INBOUND
+ '[' '*' == '*' ']'
+ iptables -t nat -A ISTIO_INBOUND -p tcp --dport 22 -j RETURN
+ '[' -n 15020 ']'
+ for port in ${INBOUND_PORTS_EXCLUDE}
+ iptables -t nat -A ISTIO_INBOUND -p tcp --dport 15020 -j RETURN
+ '[' REDIRECT = TPROXY ']'
+ iptables -t nat -A ISTIO_INBOUND -p tcp -j ISTIO_IN_REDIRECT
+ iptables -t nat -N ISTIO_OUTPUT
+ iptables -t nat -A OUTPUT -p tcp -j ISTIO_OUTPUT
+ '[' -n '' ']'
+ iptables -t nat -A ISTIO_OUTPUT -o lo -s 127.0.0.6/32 -j RETURN
+ '[' -z '' ']'
+ iptables -t nat -A ISTIO_OUTPUT -o lo '!' -d 127.0.0.1/32 -j ISTIO_IN_REDIRECT
+ for uid in ${PROXY_UID}
+ iptables -t nat -A ISTIO_OUTPUT -m owner --uid-owner 1337 -j RETURN
+ for gid in ${PROXY_GID}
+ iptables -t nat -A ISTIO_OUTPUT -m owner --gid-owner 1337 -j RETURN
+ iptables -t nat -A ISTIO_OUTPUT -d 127.0.0.1/32 -j RETURN
+ '[' 0 -gt 0 ']'
+ '[' 1 -gt 0 ']'
+ '[' '*' == '*' ']'
+ iptables -t nat -A ISTIO_OUTPUT -j ISTIO_REDIRECT
+ set +o nounset
+ '[' -n '' ']'
+ ip6tables -F INPUT
ip6tables v1.6.1: can't initialize ip6tables table `filter': Table does not exist (do you need to insmod?)
Perhaps ip6tables or your kernel needs to be upgraded.
+ true
+ ip6tables -A INPUT -m state --state ESTABLISHED -j ACCEPT
ip6tables v1.6.1: can't initialize ip6tables table `filter': Table does not exist (do you need to insmod?)
Perhaps ip6tables or your kernel needs to be upgraded.
+ true
+ ip6tables -A INPUT -i lo -d ::1 -j ACCEPT
ip6tables v1.6.1: can't initialize ip6tables table `filter': Table does not exist (do you need to insmod?)
Perhaps ip6tables or your kernel needs to be upgraded.
+ true
+ ip6tables -A INPUT -j REJECT
ip6tables v1.6.1: can't initialize ip6tables table `filter': Table does not exist (do you need to insmod?)
Perhaps ip6tables or your kernel needs to be upgraded.
+ true
+ dump
+ iptables-save
# Generated by iptables-save v1.6.1 on Thu Dec 12 00:08:53 2019
*nat
:PREROUTING ACCEPT [0:0]
:INPUT ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
:POSTROUTING ACCEPT [0:0]
:ISTIO_INBOUND - [0:0]
:ISTIO_IN_REDIRECT - [0:0]
:ISTIO_OUTPUT - [0:0]
:ISTIO_REDIRECT - [0:0]
-A PREROUTING -p tcp -j ISTIO_INBOUND
-A OUTPUT -p tcp -j ISTIO_OUTPUT
-A ISTIO_INBOUND -p tcp -m tcp --dport 22 -j RETURN
-A ISTIO_INBOUND -p tcp -m tcp --dport 15020 -j RETURN
-A ISTIO_INBOUND -p tcp -j ISTIO_IN_REDIRECT
-A ISTIO_IN_REDIRECT -p tcp -j REDIRECT --to-ports 15006
+ ip6tables-save
-A ISTIO_OUTPUT -s 127.0.0.6/32 -o lo -j RETURN
-A ISTIO_OUTPUT ! -d 127.0.0.1/32 -o lo -j ISTIO_IN_REDIRECT
-A ISTIO_OUTPUT -m owner --uid-owner 1337 -j RETURN
-A ISTIO_OUTPUT -m owner --gid-owner 1337 -j RETURN
-A ISTIO_OUTPUT -d 127.0.0.1/32 -j RETURN
-A ISTIO_OUTPUT -j ISTIO_REDIRECT
-A ISTIO_REDIRECT -p tcp -j REDIRECT --to-ports 15001
COMMIT
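For the record, the redirect rules above look complete (ports 22 and 15020 excluded, everything else sent to 15006 inbound and 15001 outbound), and the ip6tables failures are tolerated by the script itself (note the + true after each one), so the init step is probably not the culprit. If the rules need to be re-read later, the init container keeps this trace in its own log:

kubectl -n spaceiwknmd3f logs library-front-istio-885547d66-4vmg8 -c istio-init
# istio-init is the standard injected init container name; pod and namespace from the Pilot log below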
pilot discovery log
2019-12-12T00:16:11.734660Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: Watch close - *v1.Endpoints total 154 items received
2019-12-12T00:16:16.226698Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:16:16.431924Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:16:16.500081Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:16:16.692034Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:16:49.913331Z info transport: closing server transport due to maximum connection age.
2019-12-12T00:16:49.913520Z info transport: loopyWriter.run returning. connection error: desc = "transport is closing"
2019-12-12T00:16:49.913539Z info transport: http2Server.HandleStreams failed to read frame: read tcp 10.242.1.88:15010->10.240.0.100:55154: use of closed network connection
2019-12-12T00:16:49.913580Z info ads v2/ads.go:171 ADS: "10.240.0.100:55154" sidecar~10.240.0.100~library-front-istio-885547d66-4vmg8.spaceiwknmd3f~spaceiwknmd3f.svc.cluster.local-1028 terminated with stream closed
istio.io/pkg/log.(*Scope).emit
istio.io/pkg@v0.0.0-20191030005435-10d06b6b315e/log/scope.go:277
istio.io/pkg/log.(*Scope).Infof
istio.io/pkg@v0.0.0-20191030005435-10d06b6b315e/log/scope.go:213
istio.io/istio/pilot/pkg/proxy/envoy/v2.receiveThread
istio.io/istio@/pilot/pkg/proxy/envoy/v2/ads.go:171
2019-12-12T00:16:50.325494Z info ads v2/ads.go:247 ADS:CDS: REQ 10.240.0.100:33372 sidecar~10.240.0.100~library-front-istio-885547d66-4vmg8.spaceiwknmd3f~spaceiwknmd3f.svc.cluster.local-1069 1.845887ms version:2019-12-11T10:55:54Z/61
istio.io/pkg/log.(*Scope).emit
istio.io/pkg@v0.0.0-20191030005435-10d06b6b315e/log/scope.go:277
istio.io/pkg/log.(*Scope).Infof
istio.io/pkg@v0.0.0-20191030005435-10d06b6b315e/log/scope.go:213
istio.io/istio/pilot/pkg/proxy/envoy/v2.(*DiscoveryServer).StreamAggregatedResources
istio.io/istio@/pilot/pkg/proxy/envoy/v2/ads.go:247
github.com/envoyproxy/go-control-plane/envoy/service/discovery/v2._AggregatedDiscoveryService_StreamAggregatedResources_Handler
github.com/envoyproxy/go-control-plane@v0.9.1-0.20191002184426-9d865299d2ff/envoy/service/discovery/v2/ads.pb.go:181
google.golang.org/grpc.(*Server).processStreamingRPC
google.golang.org/grpc@v1.24.0/server.go:1199
google.golang.org/grpc.(*Server).handleStream
google.golang.org/grpc@v1.24.0/server.go:1279
google.golang.org/grpc.(*Server).serveStreams.func1.1
google.golang.org/grpc@v1.24.0/server.go:710
2019-12-12T00:17:16.227017Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:17:16.432665Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:17:16.500879Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:17:16.692665Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:17:32.754733Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: Watch close - *v1.Node total 301 items received
2019-12-12T00:18:16.227368Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:18:16.435215Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:18:16.502501Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
2019-12-12T00:18:16.695067Z debug k8s.io/client-go@v11.0.1-0.20190409021438-1a26190bd76a+incompatible/tools/cache/reflector.go:98: forcing resync
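One note on the discovery log: the "maximum connection age" messages are Pilot deliberately recycling long-lived gRPC streams on a keepalive timer, so by themselves they are expected; the question is whether the sidecar reconnects and gets a fresh push afterwards. Pilot's monitoring endpoint can confirm that (15014 is the default monitoring port in this release; the deployment name assumes the demo manifest):

kubectl -n cloudos-mesh port-forward deployment/istio-pilot 15014:15014 &
curl -s localhost:15014/metrics | grep pilot_xds
# pilot_xds gauges the number of connected proxies; pilot_xds_pushes counts pushes per type (cds/eds/lds/rds)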
pilot istio-proxy log
- address:
socket_address:
address: "0.0.0.0"
port_value: 15005
filter_chains:
- filters:
- config:
access_log:
- config:
path: /dev/stdout
name: envoy.file_access_log
http_filters:
- config:
default_destination_service: istio-pilot.cloudos-mesh.svc.cluster.local
service_configs:
istio-pilot.cloudos-mesh.svc.cluster.local:
disable_check_calls: true
mixer_attributes:
attributes:
destination.service.host:
string_value: istio-pilot.cloudos-mesh.svc.cluster.local
destination.uid:
string_value: kubernetes://istio-pilot-7b645455cb-8kzzs.cloudos-mesh
destination.namespace:
string_value: cloudos-mesh
destination.ip:
bytes_value: AAAAAAAAAAAAAP//CvIBWA==
destination.port:
int64_value: 15005
context.reporter.kind:
string_value: inbound
context.reporter.uid:
string_value: kubernetes://istio-pilot-7b645455cb-8kzzs.cloudos-mesh
transport:
check_cluster: mixer_check_server
report_cluster: mixer_report_server
attributes_for_mixer_proxy:
attributes:
source.uid:
string_value: kubernetes://istio-pilot-7b645455cb-8kzzs.cloudos-mesh
name: mixer
- name: envoy.router
route_config:
name: "15005"
virtual_hosts:
- domains:
- '*'
name: istio-pilot.cloudos-mesh.svc.cluster.local
routes:
- decorator:
operation: v1
match:
prefix: /
route:
cluster: in.8080
timeout: 0.000s
stat_prefix: "15005"
name: envoy.http_connection_manager
tls_context:
common_tls_context:
alpn_protocols:
- http/1.1
tls_certificates:
- certificate_chain:
filename: /etc/certs/cert-chain.pem
private_key:
filename: /etc/certs/key.pem
validation_context:
trusted_ca:
filename: /etc/certs/root-cert.pem
require_client_certificate: true
name: "15005"
- address:
socket_address:
address: "0.0.0.0"
port_value: 15007
filter_chains:
- filters:
- config:
access_log:
- config:
path: /dev/stdout
name: envoy.file_access_log
http_filters:
- config:
default_destination_service: istio-pilot.cloudos-mesh.svc.cluster.local
service_configs:
istio-pilot.cloudos-mesh.svc.cluster.local:
disable_check_calls: true
mixer_attributes:
attributes:
destination.service.host:
string_value: istio-pilot.cloudos-mesh.svc.cluster.local
destination.uid:
string_value: kubernetes://istio-pilot-7b645455cb-8kzzs.cloudos-mesh
destination.namespace:
string_value: cloudos-mesh
destination.ip:
bytes_value: AAAAAAAAAAAAAP//CvIBWA==
destination.port:
int64_value: 15007
context.reporter.kind:
string_value: inbound
context.reporter.uid:
string_value: kubernetes://istio-pilot-7b645455cb-8kzzs.cloudos-mesh
transport:
check_cluster: mixer_check_server
report_cluster: mixer_report_server
attributes_for_mixer_proxy:
attributes:
source.uid:
string_value: kubernetes://istio-pilot-7b645455cb-8kzzs.cloudos-mesh
name: mixer
- name: envoy.router
route_config:
name: "15007"
virtual_hosts:
- domains:
- '*'
name: istio-pilot.cloudos-mesh.svc.cluster.local
routes:
- decorator:
operation: v1
match:
prefix: /
route:
cluster: in.8080
timeout: 0.000s
stat_prefix: "15007"
name: envoy.http_connection_manager
name: "15007"
2019-12-11T06:17:15.591080Z info PilotSAN []string(nil)
2019-12-11T06:17:15.591165Z info Starting proxy agent
2019-12-11T06:17:15.591384Z info Received new config, creating new Envoy epoch 0
2019-12-11T06:17:15.591485Z info watching /etc/certs for changes
2019-12-11T06:17:15.591485Z info Epoch 0 starting
2019-12-11T06:17:15.591929Z info Envoy command: [-c /etc/istio/proxy/envoy.yaml --restart-epoch 0 --drain-time-s 45 --parent-shutdown-time-s 60 --service-cluster istio-pilot --service-node sidecar~10.242.1.88~istio-pilot-7b645455cb-8kzzs.cloudos-mesh~cloudos-mesh.svc.cluster.local --max-obj-name-len 189 --local-address-ip-version v4 --log-format [Envoy (Epoch 0)] [%Y-%m-%d %T.%e][%t][%l][%n] %v -l warning --component-log-level misc:error]
2019-12-11T08:11:04.456353Z info watchFileEvents: "/etc/certs": MODIFY|ATTRIB
2019-12-11T08:11:04.456489Z info watchFileEvents: "/etc/certs/..2019_12_11_06_17_09.476982294": MODIFY|ATTRIB
2019-12-11T08:11:04.456507Z info watchFileEvents: "/etc/certs/..data": MODIFY|ATTRIB
2019-12-11T08:11:04.456536Z info watchFileEvents: "/etc/certs/cert-chain.pem": MODIFY|ATTRIB
2019-12-11T08:11:04.456557Z info watchFileEvents: "/etc/certs/key.pem": MODIFY|ATTRIB
2019-12-11T08:11:04.456570Z info watchFileEvents: "/etc/certs/root-cert.pem": MODIFY|ATTRIB
2019-12-11T08:11:14.456630Z info watchFileEvents: notifying
Actually, when I restarted pilot the problem seemed to go away, but a day later it showed up again.
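For reference, until the root cause is found the restart can be done by label rather than by pod name (kubectl 1.11 has no rollout restart; istio=pilot is the label the demo manifest applies to the Pilot pods):

kubectl -n cloudos-mesh delete pod -l istio=pilot
# the deployment recreates the pod and the sidecars reconnect to the new Pilot instance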
Can you share anything about your environment? Are both pilot and the sidecar in the same Kubernetes cluster? What steps did you take to install? Are you running your own cluster, or on a managed cluster like GKE or AKS?
I deployed on five servers; the configuration information is as follows. All nodes run the same CentOS release.
--- download Istio 1.4 ---
oc adm policy add-scc-to-group anyuid system:serviceaccounts -n istio-system
oc patch clusterrole kiali -p '[{"op":"add", "path":"/rules/-", "value":{"apiGroups":["apps.openshift.io"], "resources":["deploymentconfigs"],"verbs": ["get", "list", "watch"]}}]' --type json
oc patch clusterrole kiali -p '[{"op":"add", "path":"/rules/-", "value":{"apiGroups":["project.openshift.io"], "resources":["projects"],"verbs": ["get"]}}]' --type json
oc patch clusterrole kiali -p '[{"op":"add", "path":"/rules/-", "value":{"apiGroups":["route.openshift.io"], "resources":["routes"],"verbs": ["get"]}}]' --type json
--- change istio-demo.yaml to use namespace cloudos-mesh for Istio ---
kubectl create -f istio-demo-cloudos-mesh.yaml
[root@node147 istio1210]# lsb_release -a
LSB Version: :core-4.1-amd64:core-4.1-noarch
Distributor ID: CentOS
Description: CentOS Linux release 7.6.1810 (Core)
Release: 7.6.1810
Codename: Core
[root@master3091 istio-1.3.6]# kubectl get nodes
NAME STATUS ROLES AGE VERSION
master3091 Ready compute,infra,master 22d v1.11.0+d4cacc0
slave3092 Ready compute,infra,master 22d v1.11.0+d4cacc0
slave3093 Ready compute,infra,master 22d v1.11.0+d4cacc0
worker3094 Ready compute 21d v1.11.0+d4cacc0
worker3095 Ready compute 21d v1.11.0+d4cacc0
[root@master3091 istio-1.3.6]# kubectl top nodes
NAME CPU(cores) CPU% MEMORY(bytes) MEMORY%
master3091 928m 4% 32087Mi 32%
slave3092 4328m 21% 24890Mi 25%
slave3093 8647m 43% 35291Mi 35%
worker3094 514m 12% 4571Mi 12%
worker3095 687m 17% 5887Mi 32%
[root@master3091 istio-1.3.6]# pod|grep cloudos-mesh
cloudos-mesh grafana-66d6465fbf-9vftl 1/1 Running 0 3h 10.242.1.149 slave3093 <none>
cloudos-mesh istio-citadel-6ccbd54947-nwgxb 1/1 Running 0 3h 10.242.1.151 slave3093 <none>
cloudos-mesh istio-egressgateway-fc58fc5d7-k88rr 1/1 Running 0 3h 10.244.0.117 worker3095 <none>
cloudos-mesh istio-galley-58f6574445-4749m 1/1 Running 0 3h 10.242.1.154 slave3093 <none>
cloudos-mesh istio-ingressgateway-697fc957bb-dv676 1/1 Running 0 3h 10.244.0.118 worker3095 <none>
cloudos-mesh istio-pilot-84855b787b-hnzdk 2/2 Running 0 3h 10.242.1.147 slave3093 <none>
cloudos-mesh istio-policy-548c9c7466-sn7zc 2/2 Running 1 3h 10.242.1.146 slave3093 <none>
cloudos-mesh istio-sidecar-injector-66f885f45-dl7kl 1/1 Running 0 3h 10.242.1.153 slave3093 <none>
cloudos-mesh istio-telemetry-76fd76dcb4-gbmlv 2/2 Running 2 3h 10.244.0.120 worker3095 <none>
cloudos-mesh istio-tracing-7dd9b9548f-qb25j 1/1 Running 0 3h 10.242.1.150 slave3093 <none>
cloudos-mesh kiali-5b76d95ddf-5fmsx 1/1 Running 0 3h 10.244.0.119 worker3095 <none>
cloudos-mesh prometheus-6ccfbc78c-mhcdb 1/1 Running 0 3h 10.242.1.152 slave3093 <none>
This is no longer a problem; it can be solved by adding OpenShift permissions.
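The exact commands were not posted, but the OpenShift permissions Istio's platform setup guide calls for are security context constraints along these lines; the application namespace (spaceiwknmd3f in the logs above) needs them so the injected istio-init container is allowed to program iptables:

oc adm policy add-scc-to-group anyuid system:serviceaccounts -n cloudos-mesh
# and for each namespace that runs injected sidecars:
oc adm policy add-scc-to-group privileged system:serviceaccounts -n spaceiwknmd3f
oc adm policy add-scc-to-group anyuid system:serviceaccounts -n spaceiwknmd3f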
How exactly was this solved by adding OpenShift permissions?