diff --git a/.run/查询应用分支-3570.run.xml b/.run/查询应用分支-3570.run.xml
new file mode 100644
index 0000000..5eeb935
--- /dev/null
+++ b/.run/查询应用分支-3570.run.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.run/重启DEMO-3570.run.xml b/.run/重启DEMO-3570.run.xml
new file mode 100644
index 0000000..3ef9fbd
--- /dev/null
+++ b/.run/重启DEMO-3570.run.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/agent-common/real_project/CmiiImageListConfig.go b/agent-common/real_project/CmiiImageListConfig.go
index 8b6ac72..fca8e4c 100644
--- a/agent-common/real_project/CmiiImageListConfig.go
+++ b/agent-common/real_project/CmiiImageListConfig.go
@@ -1,5 +1,91 @@
package real_project
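+// Cmii620ImageList lists the container images that make up the CMII 6.2.0-demo
+// release, plus a few fixed-version streaming images (live-operator, srs, srs-oss-adaptor).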
+var Cmii620ImageList = []string{
+ "harbor.cdcyy.com.cn/cmii/cmii-open-gateway:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-depotautoreturn:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-integration:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-grid-manage:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-process:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-threedsimulation:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-bridge:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uas-lifecycle:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-airspace:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-data-post-process:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-industrial-portfolio:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-logger:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-mission:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-sense-adapter:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-app-release:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-autowaypoint:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-cms:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-developer:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-oauth:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uas-gateway:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-iot-dispatcher:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-cloud-live:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-clusters:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-device:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-material-warehouse:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-waypoint:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-multilink:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-gis-server:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-sync:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-brain:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-mqtthandler:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-notice:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-tower:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-advanced5g:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-admin-gateway:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-alarm:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-emergency:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-user:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-kpi-monitor:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-grid-datasource:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-grid-engine:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-surveillance:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uavms-security-center:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-fwdd:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-admin-data:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-admin-user:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-suav-supervision:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-gateway:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-ai-brain:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-uas:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-suav-platform-supervisionh5:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-emergency-rescue:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-media:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-oms:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-threedsimulation:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-dispatchh5:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-detection:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-jiangsuwenlv:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-logistics:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-multiterminal:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-qinghaitourism:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-splice:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-open:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-seniclive:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uavms-platform-security-center:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-suav-platform-supervision:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-uasms:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-pilot2-to-cloud:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-base:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-hljtt:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-securityh5:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-share:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-cms-portal:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-mws:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-security:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-visualization:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-armypeople:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-uav-platform-qingdao:6.2.0-demo",
+ "harbor.cdcyy.com.cn/cmii/cmii-live-operator:5.2.0",
+ "harbor.cdcyy.com.cn/cmii/srs:v5.0.195",
+ "harbor.cdcyy.com.cn/cmii/cmii-srs-oss-adaptor:2023-SA-skip-CHL",
+}
+
var Cmii611ImageList = []string{
"harbor.cdcyy.com.cn/cmii/cmii-uav-brain:6.1.1",
"harbor.cdcyy.com.cn/cmii/cmii-uav-depotautoreturn:6.1.1",
diff --git a/agent-common/real_project/cmii-iot-dispatcher.yaml b/agent-common/real_project/cmii-iot-dispatcher.yaml
index 6a7427c..56e709a 100644
--- a/agent-common/real_project/cmii-iot-dispatcher.yaml
+++ b/agent-common/real_project/cmii-iot-dispatcher.yaml
@@ -2,7 +2,7 @@ kind: Deployment
apiVersion: apps/v1
metadata:
name: cmii-uav-iot-dispatcher
- namespace: jxejpt
+ namespace: hbyd
labels:
app.kubernetes.io/app-version: 5.7.0
app.kubernetes.io/managed-by: octopus-control
@@ -28,7 +28,7 @@ spec:
claimName: nfs-backend-log-pvc
containers:
- name: cmii-uav-iot-dispatcher
- image: '10.20.1.135:8033/cmii/cmii-uav-iot-dispatcher:5.7.0'
+ image: '192.168.0.10:8033/cmii/cmii-uav-iot-dispatcher:6.1.0'
ports:
- name: pod-port
containerPort: 8080
@@ -53,7 +53,7 @@ spec:
- name: SVC_NAME
value: cmlc-uav-iot-dispatcher-svc
- name: K8S_NAMESPACE
- value: jxejpt
+ value: hbyd
- name: APPLICATION_NAME
value: cmii-uav-iot-dispatcher
- name: CUST_JAVA_OPTS
@@ -107,7 +107,7 @@ kind: Service
apiVersion: v1
metadata:
name: cmii-uav-iot-dispatcher
- namespace: jxejpt
+ namespace: hbyd
labels:
app.kubernetes.io/app-version: 5.7.0
app.kubernetes.io/managed-by: octopus-control
diff --git a/agent-common/real_project/ynydapp/k8s-backend.yaml b/agent-common/real_project/ynydapp/k8s-backend.yaml
new file mode 100644
index 0000000..48e9246
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-backend.yaml
@@ -0,0 +1,6533 @@
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-emergency
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-emergency
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-emergency
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-emergency
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-emergency
+ image: 192.168.186.11:8033/cmii/cmii-uav-emergency:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-emergency
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-emergency
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-emergency
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-emergency
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-emergency
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-sync
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sync
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sync
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sync
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-sync
+ image: 192.168.186.11:8033/cmii/cmii-uav-sync:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-sync
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-sync
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-sync
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sync
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-sync
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-grid-engine
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-engine
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-engine
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-engine
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-grid-engine
+ image: 192.168.186.11:8033/cmii/cmii-uav-grid-engine:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-grid-engine
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-grid-engine
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-grid-engine
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-engine
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-engine
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-grid-manage
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-manage
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-manage
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-manage
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-grid-manage
+ image: 192.168.186.11:8033/cmii/cmii-uav-grid-manage:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-grid-manage
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-grid-manage
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-grid-manage
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-manage
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-manage
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-process
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-process
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-process
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-process
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-process
+ image: 192.168.186.11:8033/cmii/cmii-uav-process:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-process
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-process
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-process
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-process
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-process
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-autowaypoint
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-autowaypoint
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-autowaypoint
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-autowaypoint
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-autowaypoint
+ image: 192.168.186.11:8033/cmii/cmii-uav-autowaypoint:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-autowaypoint
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-autowaypoint
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-autowaypoint
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-autowaypoint
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-autowaypoint
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-material-warehouse
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-material-warehouse
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-material-warehouse
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-material-warehouse
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-material-warehouse
+ image: 192.168.186.11:8033/cmii/cmii-uav-material-warehouse:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-material-warehouse
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-material-warehouse
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-material-warehouse
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-material-warehouse
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-material-warehouse
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-device
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-device
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-device
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-device
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-device
+ image: 192.168.186.11:8033/cmii/cmii-uav-device:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-device
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-device
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-device
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-device
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-device
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-cms
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cms
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cms
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cms
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-cms
+ image: 192.168.186.11:8033/cmii/cmii-uav-cms:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-cms
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-cms
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-cms
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cms
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-cms
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-cloud-live
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cloud-live
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cloud-live
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cloud-live
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-cloud-live
+ image: 192.168.186.11:8033/cmii/cmii-uav-cloud-live:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-cloud-live
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-cloud-live
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-cloud-live
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-cloud-live
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-cloud-live
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-sense-adapter
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sense-adapter
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sense-adapter
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sense-adapter
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-sense-adapter
+ image: 192.168.186.11:8033/cmii/cmii-uav-sense-adapter:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-sense-adapter
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-sense-adapter
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-sense-adapter
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-sense-adapter
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-sense-adapter
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-suav-supervision
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-suav-supervision
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-suav-supervision
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-suav-supervision
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-suav-supervision
+ image: 192.168.186.11:8033/cmii/cmii-suav-supervision:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-suav-supervision
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-suav-supervision
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-suav-supervision
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-suav-supervision
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-suav-supervision
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-airspace
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-airspace
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-airspace
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-airspace
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-airspace
+ image: 192.168.186.11:8033/cmii/cmii-uav-airspace:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-airspace
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-airspace
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-airspace
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-airspace
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-airspace
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-developer
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-developer
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-developer
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-developer
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-developer
+ image: 192.168.186.11:8033/cmii/cmii-uav-developer:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-developer
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-developer
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-developer
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-developer
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-developer
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-bridge
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-bridge
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-bridge
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-bridge
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-bridge
+ image: 192.168.186.11:8033/cmii/cmii-uav-bridge:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-bridge
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-bridge
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-bridge
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-bridge
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-bridge
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-kpi-monitor
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-kpi-monitor
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-kpi-monitor
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-kpi-monitor
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-kpi-monitor
+ image: 192.168.186.11:8033/cmii/cmii-uav-kpi-monitor:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-kpi-monitor
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-kpi-monitor
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-kpi-monitor
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-kpi-monitor
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-kpi-monitor
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-admin-user
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-user
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-admin-user
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-user
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-admin-user
+ image: 192.168.186.11:8033/cmii/cmii-admin-user:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-admin-user
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-admin-user
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-admin-user
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-user
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-admin-user
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-gis-server
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gis-server
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gis-server
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gis-server
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-gis-server
+ image: 192.168.186.11:8033/cmii/cmii-uav-gis-server:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-gis-server
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-gis-server
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-gis-server
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gis-server
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-gis-server
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-open-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-open-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-open-gateway
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-open-gateway
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-open-gateway
+ image: 192.168.186.11:8033/cmii/cmii-open-gateway:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-open-gateway
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-open-gateway
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-open-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-open-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-open-gateway
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-waypoint
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-waypoint
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-waypoint
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-waypoint
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-waypoint
+ image: 192.168.186.11:8033/cmii/cmii-uav-waypoint:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-waypoint
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-waypoint
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-waypoint
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-waypoint
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-waypoint
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gateway
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gateway
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-gateway
+ image: 192.168.186.11:8033/cmii/cmii-uav-gateway:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-gateway
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-gateway
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-gateway
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uas-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uas-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uas-gateway
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uas-gateway
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uas-gateway
+ image: 192.168.186.11:8033/cmii/cmii-uas-gateway:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uas-gateway
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uas-gateway
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uas-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uas-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uas-gateway
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-logger
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-logger
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-logger
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-logger
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-logger
+ image: 192.168.186.11:8033/cmii/cmii-uav-logger:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-logger
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-logger
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-logger
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-logger
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-logger
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-alarm
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-alarm
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-alarm
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-alarm
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-alarm
+ image: 192.168.186.11:8033/cmii/cmii-uav-alarm:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-alarm
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-alarm
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-alarm
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-alarm
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-alarm
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uavms-security-center
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uavms-security-center
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uavms-security-center
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uavms-security-center
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uavms-security-center
+ image: 192.168.186.11:8033/cmii/cmii-uavms-security-center:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uavms-security-center
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uavms-security-center
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uavms-security-center
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uavms-security-center
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uavms-security-center
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-user
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-user
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-user
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-user
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-user
+ image: 192.168.186.11:8033/cmii/cmii-uav-user:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-user
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-user
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-user
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-user
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-user
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-oauth
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-oauth
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-oauth
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-oauth
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-oauth
+ image: 192.168.186.11:8033/cmii/cmii-uav-oauth:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-oauth
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-oauth
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-oauth
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-oauth
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-oauth
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-mission
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mission
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mission
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mission
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-mission
+ image: 192.168.186.11:8033/cmii/cmii-uav-mission:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-mission
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-mission
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-mission
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mission
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-mission
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-fwdd
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-fwdd
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-fwdd
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-fwdd
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-fwdd
+ image: 192.168.186.11:8033/cmii/cmii-uav-fwdd:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-fwdd
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-fwdd
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-fwdd
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-fwdd
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-fwdd
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-grid-datasource
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-datasource
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-datasource
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-datasource
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-grid-datasource
+ image: 192.168.186.11:8033/cmii/cmii-uav-grid-datasource:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-grid-datasource
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-grid-datasource
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-grid-datasource
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-datasource
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-grid-datasource
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-admin-data
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-data
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-admin-data
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-data
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-admin-data
+ image: 192.168.186.11:8033/cmii/cmii-admin-data:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-admin-data
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-admin-data
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-admin-data
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-data
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-admin-data
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-admin-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-admin-gateway
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-gateway
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-admin-gateway
+ image: 192.168.186.11:8033/cmii/cmii-admin-gateway:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-admin-gateway
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-admin-gateway
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-admin-gateway
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-admin-gateway
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-admin-gateway
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-clusters
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-clusters
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-clusters
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-clusters
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-clusters
+ image: 192.168.186.11:8033/cmii/cmii-uav-clusters:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-clusters
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-clusters
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-clusters
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-clusters
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-clusters
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-surveillance
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-surveillance
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-surveillance
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-surveillance
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-surveillance
+ image: 192.168.186.11:8033/cmii/cmii-uav-surveillance:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-surveillance
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-surveillance
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-surveillance
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-surveillance
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-surveillance
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-mqtthandler
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mqtthandler
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mqtthandler
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mqtthandler
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-mqtthandler
+ image: 192.168.186.11:8033/cmii/cmii-uav-mqtthandler:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-mqtthandler
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-mqtthandler
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-mqtthandler
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-mqtthandler
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-mqtthandler
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uas-lifecycle
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uas-lifecycle
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uas-lifecycle
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uas-lifecycle
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uas-lifecycle
+ image: 192.168.186.11:8033/cmii/cmii-uas-lifecycle:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uas-lifecycle
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uas-lifecycle
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uas-lifecycle
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uas-lifecycle
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uas-lifecycle
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-iot-dispatcher
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-iot-dispatcher
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-iot-dispatcher
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-iot-dispatcher
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-iot-dispatcher
+ image: 192.168.186.11:8033/cmii/cmii-uav-iot-dispatcher:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-iot-dispatcher
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-iot-dispatcher
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-iot-dispatcher
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-iot-dispatcher
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-iot-dispatcher
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-industrial-portfolio
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-industrial-portfolio
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-industrial-portfolio
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-industrial-portfolio
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-industrial-portfolio
+ image: 192.168.186.11:8033/cmii/cmii-uav-industrial-portfolio:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-industrial-portfolio
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-industrial-portfolio
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-industrial-portfolio
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-industrial-portfolio
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-industrial-portfolio
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-notice
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-notice
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-notice
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-notice
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-notice
+ image: 192.168.186.11:8033/cmii/cmii-uav-notice:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-notice
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-notice
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-notice
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-notice
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-notice
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-app-release
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-app-release
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-app-release
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-app-release
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-app-release
+ image: 192.168.186.11:8033/cmii/cmii-app-release:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-app-release
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-app-release
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-app-release
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-app-release
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-app-release
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-threedsimulation
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-threedsimulation
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-threedsimulation
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-threedsimulation
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-threedsimulation
+ image: 192.168.186.11:8033/cmii/cmii-uav-threedsimulation:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-threedsimulation
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-threedsimulation
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-threedsimulation
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-threedsimulation
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-threedsimulation
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-brain
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-brain
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-brain
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-brain
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-brain
+ image: 192.168.186.11:8033/cmii/cmii-uav-brain:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-brain
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-brain
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-brain
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-brain
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-brain
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-integration
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-integration
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-integration
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-integration
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-integration
+ image: 192.168.186.11:8033/cmii/cmii-uav-integration:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-integration
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-integration
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-integration
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-integration
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-integration
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-tower
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-tower
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-tower
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-tower
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-tower
+ image: 192.168.186.11:8033/cmii/cmii-uav-tower:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-tower
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-tower
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-tower
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-tower
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-tower
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-multilink
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-multilink
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-multilink
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-multilink
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-multilink
+ image: 192.168.186.11:8033/cmii/cmii-uav-multilink:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-multilink
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-multilink
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-multilink
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-multilink
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-multilink
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-data-post-process
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-data-post-process
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-data-post-process
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-data-post-process
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-data-post-process
+ image: 192.168.186.11:8033/cmii/cmii-uav-data-post-process:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-data-post-process
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-data-post-process
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-data-post-process
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-data-post-process
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-data-post-process
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-depotautoreturn
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-depotautoreturn
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: backend
+ cmii.app: cmii-uav-depotautoreturn
+ template:
+ metadata:
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-depotautoreturn
+ spec:
+ affinity:
+ nodeAffinity:
+ requiredDuringSchedulingIgnoredDuringExecution:
+ nodeSelectorTerms:
+ - matchExpressions:
+ - key: uavcloud.env
+ operator: In
+ values:
+ - ynydapp
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-depotautoreturn
+ image: 192.168.186.11:8033/cmii/cmii-uav-depotautoreturn:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-depotautoreturn
+ - name: CUST_JAVA_OPTS
+ value: "-Xms200m -Xmx1500m -Dlog4j2.formatMsgNoLookups=true"
+ - name: NACOS_REGISTRY
+ value: "helm-nacos:8848"
+ - name: NACOS_DISCOVERY_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: NACOS_DISCOVERY_PORT
+ value: "8080"
+ - name: BIZ_CONFIG_GROUP
+ value: 6.1.0
+ - name: SYS_CONFIG_GROUP
+ value: 6.1.0
+ - name: IMAGE_VERSION
+ value: 6.1.0
+ - name: NACOS_USERNAME
+ value: "developer"
+ - name: NACOS_PASSWORD
+ value: "Deve@9128201"
+ ports:
+ - name: pod-port
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ memory: 2Gi
+ cpu: "2"
+ requests:
+ memory: 200Mi
+ cpu: 200m
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ startupProbe:
+ httpGet:
+ path: /cmii/health
+ port: pod-port
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 3
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 5
+ volumeMounts:
+ - name: nfs-backend-log-volume
+ mountPath: /cmii/logs
+ readOnly: false
+ subPath: ynydapp/cmii-uav-depotautoreturn
+ volumes:
+ - name: nfs-backend-log-volume
+ persistentVolumeClaim:
+ claimName: nfs-backend-log-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-depotautoreturn
+ namespace: ynydapp
+ labels:
+ cmii.type: backend
+ cmii.app: cmii-uav-depotautoreturn
+ octopus/control: backend-app-1.0.0
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: backend
+ cmii.app: cmii-uav-depotautoreturn
+ ports:
+ - name: backend-tcp
+ port: 8080
+ protocol: TCP
+ targetPort: 8080
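All of the backend Deployments above point their liveness, readiness, and startup probes at GET /cmii/health on the named container port (8080), and each companion Service exposes that same port under the application's own name inside the ynydapp namespace. A minimal Go sketch of an in-cluster smoke check against one of those Services follows; the service name, port, and path are taken from the manifests above, while the check itself is only illustrative and not part of this change.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// Probe the same endpoint the Deployments above use for their liveness,
// readiness, and startup probes. Run this from inside the cluster so the
// ClusterIP Service DNS name resolves.
func main() {
	// Service name, namespace, and port come from the manifests above.
	url := "http://cmii-uav-iot-dispatcher.ynydapp.svc.cluster.local:8080/cmii/health"

	client := &http.Client{Timeout: 5 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Println("health check failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("health status:", resp.StatusCode)
}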
diff --git a/agent-common/real_project/ynydapp/k8s-configmap.yaml b/agent-common/real_project/ynydapp/k8s-configmap.yaml
new file mode 100644
index 0000000..3300c41
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-configmap.yaml
@@ -0,0 +1,574 @@
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-smauth
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "smauth",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-dikongzhixingh5
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "dikongzhixingh5",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-media
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "media",
+ AppClientId: "APP_4AU8lbifESQO4FD6"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-open
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "open",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-securityh5
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "securityh5",
+ AppClientId: "APP_N3ImO0Ubfu9peRHD"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-dispatchh5
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "dispatchh5",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-pilot2cloud
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "pilot2cloud",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-ai-brain
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "ai-brain",
+ AppClientId: "APP_rafnuCAmBESIVYMH"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-armypeople
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "armypeople",
+ AppClientId: "APP_UIegse6Lfou9pO1U"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-base
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "base",
+ AppClientId: "APP_9LY41OaKSqk2btY0"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-mianyangbackend
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "mianyangbackend",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-oms
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "oms",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-qingdao
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "qingdao",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-scanner
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "scanner",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-pangu
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-cmsportal
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "cmsportal",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-emergency
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "emergency",
+ AppClientId: "APP_aGsTAY1uMZrpKdfk"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-hyper
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "hyper",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-smsecret
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "smsecret",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-eventsh5
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "eventsh5",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-supervisionh5
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "supervisionh5",
+ AppClientId: "APP_qqSu82THfexI8PLM"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-logistics
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "logistics",
+ AppClientId: "APP_PvdfRRRBPL8xbIwl"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-blockchain
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "blockchain",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-splice
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "splice",
+ AppClientId: "APP_zE0M3sTRXrCIJS8Y"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-threedsimulation
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "threedsimulation",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-jiangsuwenlv
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "jiangsuwenlv",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-visualization
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "visualization",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-uas
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "uas",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-supervision
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "supervision",
+ AppClientId: "APP_qqSu82THfexI8PLM"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-multiterminal
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "multiterminal",
+ AppClientId: "APP_PvdfRRRBPL8xbIwl"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-share
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "share",
+ AppClientId: "APP_4lVSVI0ZGxTssir8"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-secenter
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "secenter",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-mws
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "mws",
+ AppClientId: "APP_uKniXPELlRERBBwK"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-qinghaitourism
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "qinghaitourism",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-classification
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "classification",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-traffic
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "traffic",
+ AppClientId: "APP_Jc8i2wOQ1t73QEJS"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-hljtt
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "hljtt",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-uasms
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "uasms",
+ AppClientId: "empty"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-detection
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "detection",
+ AppClientId: "APP_FDHW2VLVDWPnnOCy"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-security
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "security",
+ AppClientId: "APP_JUSEMc7afyWXxvE7"
+ }
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: tenant-prefix-seniclive
+ namespace: ynydapp
+data:
+ ingress-config.js: |-
+ var __GlobalIngressConfig = {
+ TenantEnvironment: "",
+ CloudHOST: "39.129.174.66:8088",
+ ApplicationShortName: "seniclive",
+ AppClientId: "empty"
+ }
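Every tenant-prefix-* ConfigMap above follows the same template: a fixed CloudHOST, a per-application ApplicationShortName, and an AppClientId that is either a real client id or the literal "empty". A short Go sketch of how such a manifest could be generated from those two values is shown below; the generator is a hypothetical helper written for illustration, not code carried by this change.

package main

import (
	"os"
	"text/template"
)

// tenantConfigMap mirrors the shape of the manifests in k8s-configmap.yaml.
const tenantConfigMap = `---
kind: ConfigMap
apiVersion: v1
metadata:
  name: tenant-prefix-{{.Short}}
  namespace: ynydapp
data:
  ingress-config.js: |-
    var __GlobalIngressConfig = {
      TenantEnvironment: "",
      CloudHOST: "39.129.174.66:8088",
      ApplicationShortName: "{{.Short}}",
      AppClientId: "{{.ClientID}}"
    }
`

type tenant struct {
	Short    string
	ClientID string
}

func main() {
	t := template.Must(template.New("cm").Parse(tenantConfigMap))
	// Example values taken from the tenant-prefix-media entry above.
	if err := t.Execute(os.Stdout, tenant{Short: "media", ClientID: "APP_4AU8lbifESQO4FD6"}); err != nil {
		panic(err)
	}
}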
diff --git a/agent-common/real_project/ynydapp/k8s-dashboard.yaml b/agent-common/real_project/ynydapp/k8s-dashboard.yaml
new file mode 100644
index 0000000..60b1da6
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-dashboard.yaml
@@ -0,0 +1,309 @@
+---
+apiVersion: v1
+kind: Namespace
+metadata:
+ name: kube-system
+
+---
+
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard
+ namespace: kube-system
+
+---
+
+kind: Service
+apiVersion: v1
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ kubernetes.io/cluster-service: "true"
+ name: kubernetes-dashboard
+ namespace: kube-system
+spec:
+ ports:
+ - port: 443
+ targetPort: 8443
+ nodePort: 39999
+ selector:
+ k8s-app: kubernetes-dashboard
+ type: NodePort
+
+---
+
+apiVersion: v1
+kind: Secret
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard-certs
+ namespace: kube-system
+type: Opaque
+
+---
+
+apiVersion: v1
+kind: Secret
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard-csrf
+ namespace: kube-system
+type: Opaque
+data:
+ csrf: ""
+
+---
+
+apiVersion: v1
+kind: Secret
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard-key-holder
+ namespace: kube-system
+type: Opaque
+
+---
+
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard-settings
+ namespace: kube-system
+
+---
+
+kind: Role
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard
+ namespace: kube-system
+rules:
+ # Allow Dashboard to get, update and delete Dashboard exclusive secrets.
+ - apiGroups: [""]
+ resources: ["secrets"]
+ resourceNames: ["kubernetes-dashboard-key-holder", "kubernetes-dashboard-certs", "kubernetes-dashboard-csrf"]
+ verbs: ["get", "update", "delete"]
+ # Allow Dashboard to get and update 'kubernetes-dashboard-settings' config map.
+ - apiGroups: [""]
+ resources: ["configmaps"]
+ resourceNames: ["kubernetes-dashboard-settings"]
+ verbs: ["get", "update"]
+ # Allow Dashboard to get metrics.
+ - apiGroups: [""]
+ resources: ["services"]
+ resourceNames: ["heapster", "dashboard-metrics-scraper"]
+ verbs: ["proxy"]
+ - apiGroups: [""]
+ resources: ["services/proxy"]
+ resourceNames: ["heapster", "http:heapster:", "https:heapster:", "dashboard-metrics-scraper", "http:dashboard-metrics-scraper"]
+ verbs: ["get"]
+
+---
+
+kind: ClusterRole
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard
+rules:
+ # Allow Metrics Scraper to get metrics from the Metrics server
+ - apiGroups: ["metrics.k8s.io"]
+ resources: ["pods", "nodes"]
+ verbs: ["get", "list", "watch"]
+
+---
+
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard
+ namespace: kube-system
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: Role
+ name: kubernetes-dashboard
+subjects:
+ - kind: ServiceAccount
+ name: kubernetes-dashboard
+ namespace: kube-system
+
+---
+
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: kubernetes-dashboard
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: ClusterRole
+ name: kubernetes-dashboard
+subjects:
+ - kind: ServiceAccount
+ name: kubernetes-dashboard
+ namespace: kube-system
+
+---
+
+kind: Deployment
+apiVersion: apps/v1
+metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ name: kubernetes-dashboard
+ namespace: kube-system
+spec:
+ replicas: 1
+ revisionHistoryLimit: 10
+ selector:
+ matchLabels:
+ k8s-app: kubernetes-dashboard
+ template:
+ metadata:
+ labels:
+ k8s-app: kubernetes-dashboard
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: kubernetes-dashboard
+ image: 192.168.186.11:8033/cmii/dashboard:v2.0.1
+ ports:
+ - containerPort: 8443
+ protocol: TCP
+ args:
+ - --auto-generate-certificates
+ - --namespace=kube-system
+ # Uncomment the following line to manually specify Kubernetes API server Host
+ # If not specified, Dashboard will attempt to auto discover the API server and connect
+ # to it. Uncomment only if the default does not work.
+ # - --apiserver-host=http://my-address:port
+ volumeMounts:
+ - name: kubernetes-dashboard-certs
+ mountPath: /certs
+ # Create on-disk volume to store exec logs
+ - mountPath: /tmp
+ name: tmp-volume
+ livenessProbe:
+ httpGet:
+ scheme: HTTPS
+ path: /
+ port: 8443
+ initialDelaySeconds: 30
+ timeoutSeconds: 30
+ securityContext:
+ allowPrivilegeEscalation: false
+ readOnlyRootFilesystem: true
+ runAsUser: 1001
+ runAsGroup: 2001
+ volumes:
+ - name: kubernetes-dashboard-certs
+ secret:
+ secretName: kubernetes-dashboard-certs
+ - name: tmp-volume
+ emptyDir: {}
+ serviceAccountName: kubernetes-dashboard
+ # Comment the following tolerations if Dashboard must not be deployed on master
+ tolerations:
+ - key: node-role.kubernetes.io/master
+ effect: NoSchedule
+
+---
+
+kind: Service
+apiVersion: v1
+metadata:
+ labels:
+ k8s-app: dashboard-metrics-scraper
+ name: dashboard-metrics-scraper
+ namespace: kube-system
+spec:
+ ports:
+ - port: 8000
+ targetPort: 8000
+ selector:
+ k8s-app: dashboard-metrics-scraper
+
+---
+
+kind: Deployment
+apiVersion: apps/v1
+metadata:
+ labels:
+ k8s-app: dashboard-metrics-scraper
+ name: dashboard-metrics-scraper
+ namespace: kube-system
+spec:
+ replicas: 1
+ revisionHistoryLimit: 10
+ selector:
+ matchLabels:
+ k8s-app: dashboard-metrics-scraper
+ template:
+ metadata:
+ labels:
+ k8s-app: dashboard-metrics-scraper
+ annotations:
+ seccomp.security.alpha.kubernetes.io/pod: 'runtime/default'
+ spec:
+ containers:
+ - name: dashboard-metrics-scraper
+ image: 192.168.186.11:8033/cmii/metrics-scraper:v1.0.4
+ ports:
+ - containerPort: 8000
+ protocol: TCP
+ livenessProbe:
+ httpGet:
+ scheme: HTTP
+ path: /
+ port: 8000
+ initialDelaySeconds: 30
+ timeoutSeconds: 30
+ volumeMounts:
+ - mountPath: /tmp
+ name: tmp-volume
+ securityContext:
+ allowPrivilegeEscalation: false
+ readOnlyRootFilesystem: true
+ runAsUser: 1001
+ runAsGroup: 2001
+ serviceAccountName: kubernetes-dashboard
+ # Comment the following tolerations if Dashboard must not be deployed on master
+ tolerations:
+ - key: node-role.kubernetes.io/master
+ effect: NoSchedule
+ volumes:
+ - name: tmp-volume
+ emptyDir: {}
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: admin-user
+ namespace: kube-system
+
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: admin-user
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: ClusterRole
+ name: cluster-admin
+subjects:
+ - kind: ServiceAccount
+ name: admin-user
+ namespace: kube-system
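The kubernetes-dashboard Service above publishes the dashboard as NodePort 39999, and the container serves HTTPS on 8443 with --auto-generate-certificates, so a quick reachability check has to speak TLS and skip certificate verification. A minimal Go sketch follows; <node-ip> is a placeholder for any cluster node address and the check is illustrative only.

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	// NodePort 39999 comes from the kubernetes-dashboard Service above;
	// replace <node-ip> with any reachable cluster node address.
	url := "https://<node-ip>:39999/"

	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			// The dashboard uses an auto-generated certificate, so skip
			// verification for this smoke test only.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Println("dashboard not reachable:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("dashboard responded:", resp.Status)
}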
diff --git a/agent-common/real_project/ynydapp/k8s-emqx.yaml b/agent-common/real_project/ynydapp/k8s-emqx.yaml
new file mode 100644
index 0000000..b9cf3dd
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-emqx.yaml
@@ -0,0 +1,274 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: helm-emqxs
+ namespace: ynydapp
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-emqxs-env
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ helm.sh/chart: emqx-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+data:
+ EMQX_CLUSTER__K8S__APISERVER: "https://kubernetes.default.svc.cluster.local:443"
+ EMQX_NAME: "helm-emqxs"
+ EMQX_CLUSTER__DISCOVERY: "k8s"
+ EMQX_CLUSTER__K8S__APP_NAME: "helm-emqxs"
+ EMQX_CLUSTER__K8S__SERVICE_NAME: "helm-emqxs-headless"
+ EMQX_CLUSTER__K8S__ADDRESS_TYPE: "dns"
+ EMQX_CLUSTER__K8S__namespace: "ynydapp"
+ EMQX_CLUSTER__K8S__SUFFIX: "svc.cluster.local"
+ EMQX_ALLOW_ANONYMOUS: "false"
+ EMQX_ACL_NOMATCH: "deny"
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-emqxs-cm
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ helm.sh/chart: emqx-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+data:
+ emqx_auth_mnesia.conf: |-
+ auth.mnesia.password_hash = sha256
+
+    # clientid authentication data
+ # auth.client.1.clientid = admin
+ # auth.client.1.password = 4YPk*DS%+5
+
+    ## username authentication data
+ auth.user.1.username = admin
+ auth.user.1.password = odD8#Ve7.B
+ auth.user.2.username = cmlc
+ auth.user.2.password = odD8#Ve7.B
+
+ acl.conf: |-
+ {allow, {user, "admin"}, pubsub, ["admin/#"]}.
+ {allow, {user, "dashboard"}, subscribe, ["$SYS/#"]}.
+ {allow, {ipaddr, "127.0.0.1"}, pubsub, ["$SYS/#", "#"]}.
+ {deny, all, subscribe, ["$SYS/#", {eq, "#"}]}.
+ {allow, all}.
+
+ loaded_plugins: |-
+ {emqx_auth_mnesia,true}.
+ {emqx_management, true}.
+ {emqx_recon, true}.
+ {emqx_retainer, false}.
+ {emqx_dashboard, true}.
+ {emqx_telemetry, true}.
+ {emqx_rule_engine, true}.
+ {emqx_bridge_mqtt, false}.
+---
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-emqxs
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ helm.sh/chart: emqx-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+spec:
+ replicas: 1
+ serviceName: helm-emqxs-headless
+ updateStrategy:
+ type: RollingUpdate
+ selector:
+ matchLabels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ template:
+ metadata:
+ labels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ helm.sh/chart: emqx-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+ spec:
+ affinity: {}
+ imagePullSecrets:
+ - name: harborsecret
+ serviceAccountName: helm-emqxs
+ containers:
+ - name: helm-emqxs
+ image: 192.168.186.11:8033/cmii/emqx:4.4.19
+ imagePullPolicy: Always
+ ports:
+ - name: mqtt
+ containerPort: 1883
+ - name: mqttssl
+ containerPort: 8883
+ - name: mgmt
+ containerPort: 8081
+ - name: ws
+ containerPort: 8083
+ - name: wss
+ containerPort: 8084
+ - name: dashboard
+ containerPort: 18083
+ - name: ekka
+ containerPort: 4370
+ envFrom:
+ - configMapRef:
+ name: helm-emqxs-env
+ resources: {}
+ volumeMounts:
+ - name: emqx-data
+ mountPath: "/opt/emqx/data/mnesia"
+ readOnly: false
+ - name: helm-emqxs-cm
+ mountPath: "/opt/emqx/etc/plugins/emqx_auth_mnesia.conf"
+ subPath: emqx_auth_mnesia.conf
+ readOnly: false
+# - name: helm-emqxs-cm
+# mountPath: "/opt/emqx/etc/acl.conf"
+# subPath: "acl.conf"
+# readOnly: false
+ - name: helm-emqxs-cm
+ mountPath: "/opt/emqx/data/loaded_plugins"
+ subPath: loaded_plugins
+ readOnly: false
+ volumes:
+ - name: emqx-data
+ persistentVolumeClaim:
+ claimName: helm-emqxs
+ - name: helm-emqxs-cm
+ configMap:
+ name: helm-emqxs-cm
+ items:
+ - key: emqx_auth_mnesia.conf
+ path: emqx_auth_mnesia.conf
+ - key: acl.conf
+ path: acl.conf
+ - key: loaded_plugins
+ path: loaded_plugins
+---
+kind: Role
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: helm-emqxs
+ namespace: ynydapp
+rules:
+ - apiGroups:
+ - ""
+ resources:
+ - endpoints
+ verbs:
+ - get
+ - watch
+ - list
+---
+kind: RoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: helm-emqxs
+ namespace: ynydapp
+subjects:
+ - kind: ServiceAccount
+ name: helm-emqxs
+ namespace: ynydapp
+roleRef:
+ kind: Role
+ name: helm-emqxs
+ apiGroup: rbac.authorization.k8s.io
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-emqxs
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ helm.sh/chart: emqx-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: NodePort
+ selector:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ ports:
+ - port: 1883
+ name: mqtt
+ targetPort: 1883
+ nodePort: 31883
+ - port: 18083
+ name: dashboard
+ targetPort: 18083
+ nodePort: 38085
+ - port: 8083
+ name: mqtt-websocket
+ targetPort: 8083
+ nodePort: 38083
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-emqxs-headless
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ helm.sh/chart: emqx-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ clusterIP: None
+ selector:
+ cmii.type: middleware
+ cmii.app: helm-emqxs
+ cmii.emqx.architecture: cluster
+ ports:
+ - name: mqtt
+ port: 1883
+ protocol: TCP
+ targetPort: 1883
+ - name: mqttssl
+ port: 8883
+ protocol: TCP
+ targetPort: 8883
+ - name: mgmt
+ port: 8081
+ protocol: TCP
+ targetPort: 8081
+ - name: websocket
+ port: 8083
+ protocol: TCP
+ targetPort: 8083
+ - name: wss
+ port: 8084
+ protocol: TCP
+ targetPort: 8084
+ - name: dashboard
+ port: 18083
+ protocol: TCP
+ targetPort: 18083
+ - name: ekka
+ port: 4370
+ protocol: TCP
+ targetPort: 4370
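The helm-emqxs Service exposes MQTT on NodePort 31883, and the helm-emqxs-env/helm-emqxs-cm ConfigMaps disable anonymous access and seed the mnesia auth plugin with the admin and cmlc users. A minimal Go connection sketch against those settings is shown below; the Eclipse Paho client is an assumed library choice and <node-ip> is a placeholder, neither of which is prescribed by this change.

package main

import (
	"fmt"
	"time"

	mqtt "github.com/eclipse/paho.mqtt.golang"
)

func main() {
	// NodePort and credentials come from the EMQX manifests above;
	// replace <node-ip> with any reachable cluster node address.
	opts := mqtt.NewClientOptions().
		AddBroker("tcp://<node-ip>:31883").
		SetClientID("smoke-test").
		SetUsername("admin").
		SetPassword("odD8#Ve7.B").
		SetConnectTimeout(5 * time.Second)

	client := mqtt.NewClient(opts)
	if token := client.Connect(); token.Wait() && token.Error() != nil {
		fmt.Println("connect failed:", token.Error())
		return
	}
	defer client.Disconnect(250)

	// Publish one QoS 1, non-retained message to a test topic.
	token := client.Publish("smoke/test", 1, false, "hello")
	token.Wait()
	fmt.Println("publish error:", token.Error())
}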
diff --git a/agent-common/real_project/ynydapp/k8s-frontend.yaml b/agent-common/real_project/ynydapp/k8s-frontend.yaml
new file mode 100644
index 0000000..a745b63
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-frontend.yaml
@@ -0,0 +1,2695 @@
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: nginx-cm
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+data:
+ nginx.conf: |
+ server {
+ listen 9528;
+ server_name localhost;
+ gzip on;
+
+ location / {
+ root /home/cmii-platform/dist;
+ index index.html index.htm;
+ }
+
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root html;
+ }
+ }
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-armypeople
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-armypeople
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-armypeople
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-armypeople
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-armypeople
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-armypeople:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-armypeople
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-armypeople
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-armypeople
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-armypeople
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-armypeople
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
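Each frontend Deployment mounts nginx.conf from nginx-cm and an ingress-config.js from its tenant-prefix-<app> ConfigMap into the nginx document root, so the per-tenant settings end up served at /ingress-config.js on port 9528. A small Go check against the cmii-uav-platform-armypeople Service shown above (run in-cluster so the Service name resolves; the check itself is illustrative, not part of this change):

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	// Service name and port come from the cmii-uav-platform-armypeople
	// manifests above; nginx serves the mounted file from the dist root.
	url := "http://cmii-uav-platform-armypeople.ynydapp.svc.cluster.local:9528/ingress-config.js"

	client := &http.Client{Timeout: 5 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body)) // expect ApplicationShortName: "armypeople"
}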
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-multiterminal
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-multiterminal
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-multiterminal
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-multiterminal
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-multiterminal
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-multiterminal:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-multiterminal
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-multiterminal
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-multiterminal
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-multiterminal
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-multiterminal
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-base
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-base
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-base
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-base
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-base
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-base:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-base
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-base
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-base
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-base
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-base
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-mws
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-mws
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-mws
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-mws
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-mws
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-mws:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-mws
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-mws
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-mws
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-mws
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-mws
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-media
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-media
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-media
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-media
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-media
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-media:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-media
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-media
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-media
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-media
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-media
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-detection
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-detection
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-detection
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-detection
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-detection
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-detection:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-detection
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-detection
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-detection
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-detection
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-detection
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-pilot2-to-cloud
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-pilot2-to-cloud
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-pilot2-to-cloud
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-pilot2-to-cloud
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-pilot2-to-cloud
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-pilot2-to-cloud:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-pilot2-to-cloud
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-pilot2cloud
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-pilot2-to-cloud
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-pilot2-to-cloud
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-pilot2-to-cloud
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-splice
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-splice
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-splice
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-splice
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-splice
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-splice:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-splice
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-splice
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-splice
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-splice
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-splice
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-visualization
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-visualization
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-visualization
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-visualization
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-visualization
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-visualization:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-visualization
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-visualization
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-visualization
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-visualization
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-visualization
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-cms-portal
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-cms-portal
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-cms-portal
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-cms-portal
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-cms-portal
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-cms-portal:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-cms-portal
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-cmsportal
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-cms-portal
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-cms-portal
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-cms-portal
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
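+# Note: this and the other tenant frontends created with replicas: 0 below (uas,
+# qinghaitourism, hljtt, seniclive, jiangsuwenlv, uasms, qingdao) are defined but not
+# scheduled until scaled up.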
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-uas
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uas
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uas
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uas
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-uas
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-uas:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-uas
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-uas
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-uas
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uas
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uas
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-suav-platform-supervisionh5
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervisionh5
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervisionh5
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervisionh5
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-suav-platform-supervisionh5
+ image: 192.168.186.11:8033/cmii/cmii-suav-platform-supervisionh5:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-suav-platform-supervisionh5
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-supervisionh5
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-suav-platform-supervisionh5
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervisionh5
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervisionh5
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-share
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-share
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-share
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-share
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-share
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-share:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-share
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-share
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-share
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-share
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-share
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-ai-brain
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-ai-brain
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-ai-brain
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-ai-brain
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-ai-brain
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-ai-brain:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-ai-brain
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-ai-brain
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-ai-brain
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-ai-brain
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-ai-brain
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-qinghaitourism
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qinghaitourism
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qinghaitourism
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qinghaitourism
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-qinghaitourism
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-qinghaitourism:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-qinghaitourism
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-qinghaitourism
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-qinghaitourism
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qinghaitourism
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qinghaitourism
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-securityh5
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-securityh5
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-securityh5
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-securityh5
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-securityh5
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-securityh5:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-securityh5
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-securityh5
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-securityh5
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-securityh5
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-securityh5
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-hljtt
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-hljtt
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-hljtt
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-hljtt
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-hljtt
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-hljtt:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-hljtt
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-hljtt
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-hljtt
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-hljtt
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-hljtt
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-seniclive
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-seniclive
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-seniclive
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-seniclive
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-seniclive
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-seniclive:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-seniclive
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-seniclive
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-seniclive
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-seniclive
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-seniclive
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-security
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-security
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-security
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-security
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-security
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-security:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-security
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-security
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-security
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-security
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-security
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-jiangsuwenlv
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-jiangsuwenlv
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-jiangsuwenlv
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-jiangsuwenlv
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-jiangsuwenlv
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-jiangsuwenlv:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-jiangsuwenlv
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-jiangsuwenlv
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-jiangsuwenlv
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-jiangsuwenlv
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-jiangsuwenlv
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-uasms
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uasms
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uasms
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uasms
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-uasms
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-uasms:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-uasms
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-uasms
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-uasms
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uasms
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-uasms
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uavms-platform-security-center
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uavms-platform-security-center
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uavms-platform-security-center
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uavms-platform-security-center
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uavms-platform-security-center
+ image: 192.168.186.11:8033/cmii/cmii-uavms-platform-security-center:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uavms-platform-security-center
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-secenter
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uavms-platform-security-center
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uavms-platform-security-center
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uavms-platform-security-center
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-emergency-rescue
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-emergency-rescue
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-emergency-rescue
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-emergency-rescue
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-emergency-rescue
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-emergency-rescue:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-emergency-rescue
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-emergency
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-emergency-rescue
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-emergency-rescue
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-emergency-rescue
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-open
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-open
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-open
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-open
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-open
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-open:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-open
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-open
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-open
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-open
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-open
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-qingdao
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qingdao
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 0
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qingdao
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qingdao
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-qingdao
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-qingdao:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-qingdao
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-qingdao
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-qingdao
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qingdao
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-qingdao
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-oms
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-oms
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-oms
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-oms
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-oms
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-oms:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-oms
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-oms
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-oms
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-oms
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-oms
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-suav-platform-supervision
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervision
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervision
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervision
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-suav-platform-supervision
+ image: 192.168.186.11:8033/cmii/cmii-suav-platform-supervision:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-suav-platform-supervision
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-supervision
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-suav-platform-supervision
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervision
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-suav-platform-supervision
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-logistics
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-logistics
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-logistics
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-logistics
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-logistics
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-logistics:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-logistics
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-logistics
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-logistics
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-logistics
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-logistics
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform-threedsimulation
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-threedsimulation
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-threedsimulation
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-threedsimulation
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform-threedsimulation
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform-threedsimulation:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform-threedsimulation
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-threedsimulation
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform-threedsimulation
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-threedsimulation
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform-threedsimulation
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: cmii-uav-platform
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/app-version: 6.1.0
+spec:
+ replicas: 1
+ strategy:
+ rollingUpdate:
+ maxUnavailable: 1
+ selector:
+ matchLabels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform
+ template:
+ metadata:
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: cmii-uav-platform
+ image: 192.168.186.11:8033/cmii/cmii-uav-platform:6.1.1
+ imagePullPolicy: Always
+ env:
+ - name: K8S_NAMESPACE
+ value: ynydapp
+ - name: APPLICATION_NAME
+ value: cmii-uav-platform
+ ports:
+ - name: platform-9528
+ containerPort: 9528
+ protocol: TCP
+ resources:
+ limits:
+ cpu: "1"
+ memory: 1Gi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ volumeMounts:
+ - name: nginx-conf
+ mountPath: /etc/nginx/conf.d/nginx.conf
+ subPath: nginx.conf
+ - name: tenant-prefix
+ subPath: ingress-config.js
+ mountPath: /home/cmii-platform/dist/ingress-config.js
+ volumes:
+ - name: nginx-conf
+ configMap:
+ name: nginx-cm
+ items:
+ - key: nginx.conf
+ path: nginx.conf
+ - name: tenant-prefix
+ configMap:
+ name: tenant-prefix-pangu
+ items:
+ - key: ingress-config.js
+ path: ingress-config.js
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: cmii-uav-platform
+ namespace: ynydapp
+ labels:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform
+ octopus.control: frontend-app-wdd
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: ClusterIP
+ selector:
+ cmii.type: frontend
+ cmii.app: cmii-uav-platform
+ ports:
+ - name: web-svc-port
+ port: 9528
+ protocol: TCP
+ targetPort: 9528
diff --git a/agent-common/real_project/ynydapp/k8s-ingress.yaml b/agent-common/real_project/ynydapp/k8s-ingress.yaml
new file mode 100644
index 0000000..2c059d5
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-ingress.yaml
@@ -0,0 +1,702 @@
+---
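+# networking.k8s.io/v1beta1 Ingress was removed in Kubernetes 1.22; on newer clusters this
+# manifest would need networking.k8s.io/v1 (backend.service.name/port instead of
+# serviceName/servicePort).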
+apiVersion: networking.k8s.io/v1beta1
+kind: Ingress
+metadata:
+ name: frontend-applications-ingress
+ namespace: ynydapp
+ labels:
+ type: frontend
+ octopus.control: all-ingress-config-wdd
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
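+  # rewrite-target /$1 strips the tenant prefix captured by the /<tenant>/?(.*) paths below,
+  # while the configuration-snippet first redirects bare /<tenant> requests to /<tenant>/.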
+ annotations:
+ kubernetes.io/ingress.class: "nginx"
+ nginx.ingress.kubernetes.io/enable-cors: "true"
+ nginx.ingress.kubernetes.io/rewrite-target: /$1
+ nginx.ingress.kubernetes.io/configuration-snippet: |
+ rewrite ^(/supervision)$ $1/ redirect;
+ rewrite ^(/supervisionh5)$ $1/ redirect;
+ rewrite ^(/pangu)$ $1/ redirect;
+ rewrite ^(/ai-brain)$ $1/ redirect;
+ rewrite ^(/armypeople)$ $1/ redirect;
+ rewrite ^(/base)$ $1/ redirect;
+ rewrite ^(/blockchain)$ $1/ redirect;
+ rewrite ^(/classification)$ $1/ redirect;
+ rewrite ^(/cmsportal)$ $1/ redirect;
+ rewrite ^(/detection)$ $1/ redirect;
+ rewrite ^(/dikongzhixingh5)$ $1/ redirect;
+ rewrite ^(/dispatchh5)$ $1/ redirect;
+ rewrite ^(/emergency)$ $1/ redirect;
+ rewrite ^(/eventsh5)$ $1/ redirect;
+ rewrite ^(/hljtt)$ $1/ redirect;
+ rewrite ^(/hyper)$ $1/ redirect;
+ rewrite ^(/jiangsuwenlv)$ $1/ redirect;
+ rewrite ^(/logistics)$ $1/ redirect;
+ rewrite ^(/media)$ $1/ redirect;
+ rewrite ^(/mianyangbackend)$ $1/ redirect;
+ rewrite ^(/multiterminal)$ $1/ redirect;
+ rewrite ^(/mws)$ $1/ redirect;
+ rewrite ^(/oms)$ $1/ redirect;
+ rewrite ^(/open)$ $1/ redirect;
+ rewrite ^(/pilot2cloud)$ $1/ redirect;
+ rewrite ^(/qingdao)$ $1/ redirect;
+ rewrite ^(/qinghaitourism)$ $1/ redirect;
+ rewrite ^(/scanner)$ $1/ redirect;
+ rewrite ^(/security)$ $1/ redirect;
+ rewrite ^(/securityh5)$ $1/ redirect;
+ rewrite ^(/seniclive)$ $1/ redirect;
+ rewrite ^(/share)$ $1/ redirect;
+ rewrite ^(/smauth)$ $1/ redirect;
+ rewrite ^(/smsecret)$ $1/ redirect;
+ rewrite ^(/splice)$ $1/ redirect;
+ rewrite ^(/threedsimulation)$ $1/ redirect;
+ rewrite ^(/traffic)$ $1/ redirect;
+ rewrite ^(/uas)$ $1/ redirect;
+ rewrite ^(/uasms)$ $1/ redirect;
+ rewrite ^(/visualization)$ $1/ redirect;
+ rewrite ^(/secenter)$ $1/ redirect;
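+# Some routed backends (blockchain, classification, dikongzhixingh5, dispatchh5, eventsh5,
+# hyperspectral, mianyangbackend, scanner, smauth, smsecret) are not created by
+# k8s-frontend.yaml in this patch and are presumably deployed separately in the ynydapp
+# namespace.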
+spec:
+ rules:
+ - host: fake-domain.ynydapp.io
+ http:
+ paths:
+ - path: /?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform
+ servicePort: 9528
+ - path: /supervision/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-suav-platform-supervision
+ servicePort: 9528
+ - path: /supervisionh5/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-suav-platform-supervisionh5
+ servicePort: 9528
+ - path: /pangu/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform
+ servicePort: 9528
+ - path: /ai-brain/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-ai-brain
+ servicePort: 9528
+ - path: /armypeople/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-armypeople
+ servicePort: 9528
+ - path: /base/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-base
+ servicePort: 9528
+ - path: /blockchain/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-blockchain
+ servicePort: 9528
+ - path: /classification/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-classification
+ servicePort: 9528
+ - path: /cmsportal/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-cms-portal
+ servicePort: 9528
+ - path: /detection/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-detection
+ servicePort: 9528
+ - path: /dikongzhixingh5/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-dikongzhixingh5
+ servicePort: 9528
+ - path: /dispatchh5/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-dispatchh5
+ servicePort: 9528
+ - path: /emergency/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-emergency-rescue
+ servicePort: 9528
+ - path: /eventsh5/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-eventsh5
+ servicePort: 9528
+ - path: /hljtt/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-hljtt
+ servicePort: 9528
+ - path: /hyper/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-hyperspectral
+ servicePort: 9528
+ - path: /jiangsuwenlv/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-jiangsuwenlv
+ servicePort: 9528
+ - path: /logistics/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-logistics
+ servicePort: 9528
+ - path: /media/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-media
+ servicePort: 9528
+ - path: /mianyangbackend/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-mianyangbackend
+ servicePort: 9528
+ - path: /multiterminal/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-multiterminal
+ servicePort: 9528
+ - path: /mws/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-mws
+ servicePort: 9528
+ - path: /oms/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-oms
+ servicePort: 9528
+ - path: /open/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-open
+ servicePort: 9528
+ - path: /pilot2cloud/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-pilot2-to-cloud
+ servicePort: 9528
+ - path: /qingdao/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-qingdao
+ servicePort: 9528
+ - path: /qinghaitourism/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-qinghaitourism
+ servicePort: 9528
+ - path: /scanner/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-scanner
+ servicePort: 9528
+ - path: /security/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-security
+ servicePort: 9528
+ - path: /securityh5/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-securityh5
+ servicePort: 9528
+ - path: /seniclive/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-seniclive
+ servicePort: 9528
+ - path: /share/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-share
+ servicePort: 9528
+ - path: /smauth/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-smauth
+ servicePort: 9528
+ - path: /smsecret/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-smsecret
+ servicePort: 9528
+ - path: /splice/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-splice
+ servicePort: 9528
+ - path: /threedsimulation/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-threedsimulation
+ servicePort: 9528
+ - path: /traffic/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-traffic
+ servicePort: 9528
+ - path: /uas/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-uas
+ servicePort: 9528
+ - path: /uasms/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-uasms
+ servicePort: 9528
+ - path: /visualization/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-platform-visualization
+ servicePort: 9528
+ - path: /secenter/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uavms-platform-security-center
+ servicePort: 9528
+---
+apiVersion: networking.k8s.io/v1beta1
+kind: Ingress
+metadata:
+ name: backend-applications-ingress
+ namespace: ynydapp
+ labels:
+ type: backend
+ octopus.control: all-ingress-config-wdd
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+ annotations:
+ kubernetes.io/ingress.class: "nginx"
+ nginx.ingress.kubernetes.io/enable-cors: "true"
+spec:
+ rules:
+ - host: cmii-admin-data.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-admin-data
+ servicePort: 8080
+ - host: cmii-admin-gateway.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-admin-gateway
+ servicePort: 8080
+ - host: cmii-admin-user.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-admin-user
+ servicePort: 8080
+ - host: cmii-app-release.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-app-release
+ servicePort: 8080
+ - host: cmii-open-gateway.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-open-gateway
+ servicePort: 8080
+ - host: cmii-suav-supervision.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-suav-supervision
+ servicePort: 8080
+ - host: cmii-uas-gateway.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uas-gateway
+ servicePort: 8080
+ - host: cmii-uas-lifecycle.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uas-lifecycle
+ servicePort: 8080
+ - host: cmii-uav-airspace.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-airspace
+ servicePort: 8080
+ - host: cmii-uav-alarm.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-alarm
+ servicePort: 8080
+ - host: cmii-uav-autowaypoint.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-autowaypoint
+ servicePort: 8080
+ - host: cmii-uav-brain.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-brain
+ servicePort: 8080
+ - host: cmii-uav-bridge.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-bridge
+ servicePort: 8080
+ - host: cmii-uav-cloud-live.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-cloud-live
+ servicePort: 8080
+ - host: cmii-uav-clusters.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-clusters
+ servicePort: 8080
+ - host: cmii-uav-cms.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-cms
+ servicePort: 8080
+ - host: cmii-uav-data-post-process.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-data-post-process
+ servicePort: 8080
+ - host: cmii-uav-depotautoreturn.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-depotautoreturn
+ servicePort: 8080
+ - host: cmii-uav-developer.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-developer
+ servicePort: 8080
+ - host: cmii-uav-device.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-device
+ servicePort: 8080
+ - host: cmii-uav-emergency.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-emergency
+ servicePort: 8080
+ - host: cmii-uav-fwdd.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-fwdd
+ servicePort: 8080
+ - host: cmii-uav-gateway.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-gateway
+ servicePort: 8080
+ - host: cmii-uav-gis-server.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-gis-server
+ servicePort: 8080
+ - host: cmii-uav-grid-datasource.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-grid-datasource
+ servicePort: 8080
+ - host: cmii-uav-grid-engine.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-grid-engine
+ servicePort: 8080
+ - host: cmii-uav-grid-manage.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-grid-manage
+ servicePort: 8080
+ - host: cmii-uav-industrial-portfolio.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-industrial-portfolio
+ servicePort: 8080
+ - host: cmii-uav-integration.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-integration
+ servicePort: 8080
+ - host: cmii-uav-iot-dispatcher.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-iot-dispatcher
+ servicePort: 8080
+ - host: cmii-uav-kpi-monitor.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-kpi-monitor
+ servicePort: 8080
+ - host: cmii-uav-logger.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-logger
+ servicePort: 8080
+ - host: cmii-uav-material-warehouse.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-material-warehouse
+ servicePort: 8080
+ - host: cmii-uav-mission.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-mission
+ servicePort: 8080
+ - host: cmii-uav-mqtthandler.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-mqtthandler
+ servicePort: 8080
+ - host: cmii-uav-multilink.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-multilink
+ servicePort: 8080
+ - host: cmii-uav-notice.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-notice
+ servicePort: 8080
+ - host: cmii-uav-oauth.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-oauth
+ servicePort: 8080
+ - host: cmii-uav-process.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-process
+ servicePort: 8080
+ - host: cmii-uav-sense-adapter.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-sense-adapter
+ servicePort: 8080
+ - host: cmii-uav-surveillance.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-surveillance
+ servicePort: 8080
+ - host: cmii-uav-sync.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-sync
+ servicePort: 8080
+ - host: cmii-uav-threedsimulation.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-threedsimulation
+ servicePort: 8080
+ - host: cmii-uav-tower.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-tower
+ servicePort: 8080
+ - host: cmii-uav-user.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-user
+ servicePort: 8080
+ - host: cmii-uav-waypoint.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-waypoint
+ servicePort: 8080
+ - host: cmii-uavms-security-center.uavcloud-ynydapp.io
+ http:
+ paths:
+ - path: /
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uavms-security-center
+ servicePort: 8080
+---
+apiVersion: networking.k8s.io/v1beta1
+kind: Ingress
+metadata:
+ name: all-gateways-ingress
+ namespace: ynydapp
+ labels:
+ type: api-gateway
+ octopus.control: all-ingress-config-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+ annotations:
+ kubernetes.io/ingress.class: "nginx"
+ nginx.ingress.kubernetes.io/enable-cors: "true"
+ nginx.ingress.kubernetes.io/rewrite-target: /$1
+ nginx.ingress.kubernetes.io/configuration-snippet: |
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "Upgrade";
+spec:
+ rules:
+ - host: fake-domain.ynydapp.io
+ http:
+ paths:
+ - path: /oms/api/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-admin-gateway
+ servicePort: 8080
+ - path: /open/api/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-open-gateway
+ servicePort: 8080
+ - path: /api/?(.*)
+ pathType: ImplementationSpecific
+ backend:
+ serviceName: cmii-uav-gateway
+ servicePort: 8080
diff --git a/agent-common/real_project/ynydapp/k8s-mongo.yaml b/agent-common/real_project/ynydapp/k8s-mongo.yaml
new file mode 100644
index 0000000..d0607a0
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-mongo.yaml
@@ -0,0 +1,78 @@
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-mongo
+ namespace: ynydapp
+ labels:
+ cmii.app: helm-mongo
+ cmii.type: middleware
+ helm.sh/chart: mongo-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: NodePort
+ selector:
+ cmii.app: helm-mongo
+ cmii.type: middleware
+ ports:
+ - port: 27017
+ name: server-27017
+ targetPort: 27017
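+ # 37017 lies outside the default NodePort range (30000-32767); the kube-apiserver's --service-node-port-range must be extended for this port to be allocated.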
+ nodePort: 37017
+---
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-mongo
+ namespace: ynydapp
+ labels:
+ cmii.app: helm-mongo
+ cmii.type: middleware
+ helm.sh/chart: mongo-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+spec:
+ serviceName: helm-mongo
+ replicas: 1
+ selector:
+ matchLabels:
+ cmii.app: helm-mongo
+ cmii.type: middleware
+ template:
+ metadata:
+ labels:
+ cmii.app: helm-mongo
+ cmii.type: middleware
+ helm.sh/chart: mongo-1.1.0
+ app.kubernetes.io/managed-by: octopus-control
+ app.kubernetes.io/version: 6.1.0
+ annotations:
+ pod.alpha.kubernetes.io/initialized: "true"
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ affinity: {}
+ containers:
+ - name: helm-mongo
+ image: 192.168.186.11:8033/cmii/mongo:5.0
+ resources: {}
+ ports:
+ - containerPort: 27017
+ name: mongo27017
+ protocol: TCP
+ env:
+ - name: MONGO_INITDB_ROOT_USERNAME
+ value: cmlc
+ - name: MONGO_INITDB_ROOT_PASSWORD
+ value: REdPza8#oVlt
+ volumeMounts:
+ - name: mongo-data
+ mountPath: /data/db
+ readOnly: false
+ subPath: default/helm-mongo/data/db
+ volumes:
+ - name: mongo-data
+ persistentVolumeClaim:
+ claimName: helm-mongo
+---
diff --git a/agent-common/real_project/ynydapp/k8s-mysql.yaml b/agent-common/real_project/ynydapp/k8s-mysql.yaml
new file mode 100644
index 0000000..68feedc
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-mysql.yaml
@@ -0,0 +1,410 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: helm-mysql
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ annotations: {}
+secrets:
+ - name: helm-mysql
+---
+apiVersion: v1
+kind: Secret
+metadata:
+ name: helm-mysql
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+type: Opaque
+data:
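+ # Values under "data" are base64-encoded, as required by the Kubernetes Secret API.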
+ mysql-root-password: "UXpmWFFoZDNiUQ=="
+ mysql-password: "S0F0cm5PckFKNw=="
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-mysql
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/component: primary
+data:
+ my.cnf: |-
+
+ [mysqld]
+ port=3306
+ basedir=/opt/bitnami/mysql
+ datadir=/bitnami/mysql/data
+ pid-file=/opt/bitnami/mysql/tmp/mysqld.pid
+ socket=/opt/bitnami/mysql/tmp/mysql.sock
+ log-error=/bitnami/mysql/data/error.log
+ general_log_file = /bitnami/mysql/data/general.log
+ slow_query_log_file = /bitnami/mysql/data/slow.log
+ innodb_data_file_path = ibdata1:512M:autoextend
+ innodb_buffer_pool_size = 512M
+ innodb_buffer_pool_instances = 2
+ innodb_log_file_size = 512M
+ innodb_log_files_in_group = 4
+ log-bin = /bitnami/mysql/data/mysql-bin
+ max_binlog_size=1G
+ transaction_isolation = REPEATABLE-READ
+ default_storage_engine = innodb
+ character-set-server = utf8mb4
+ collation-server=utf8mb4_bin
+ binlog_format = ROW
+ binlog_rows_query_log_events=on
+ binlog_cache_size=4M
+ binlog_expire_logs_seconds = 1296000
+ max_binlog_cache_size=2G
+ gtid_mode = on
+ enforce_gtid_consistency = 1
+ sync_binlog = 1
+ innodb_flush_log_at_trx_commit = 1
+ innodb_flush_method = O_DIRECT
+ log_slave_updates=1
+ relay_log_recovery = 1
+ relay-log-purge = 1
+ default_time_zone = '+08:00'
+ lower_case_table_names=1
+ log_bin_trust_function_creators=1
+ group_concat_max_len=67108864
+ innodb_io_capacity = 4000
+ innodb_io_capacity_max = 8000
+ innodb_flush_sync = 0
+ innodb_flush_neighbors = 0
+ innodb_write_io_threads = 8
+ innodb_read_io_threads = 8
+ innodb_purge_threads = 4
+ innodb_page_cleaners = 4
+ innodb_open_files = 65535
+ innodb_max_dirty_pages_pct = 50
+ innodb_lru_scan_depth = 4000
+ innodb_checksum_algorithm = crc32
+ innodb_lock_wait_timeout = 10
+ innodb_rollback_on_timeout = 1
+ innodb_print_all_deadlocks = 1
+ innodb_file_per_table = 1
+ innodb_online_alter_log_max_size = 4G
+ innodb_stats_on_metadata = 0
+ innodb_thread_concurrency = 0
+ innodb_sync_spin_loops = 100
+ innodb_spin_wait_delay = 30
+ lock_wait_timeout = 3600
+ slow_query_log = 1
+ long_query_time = 10
+ log_queries_not_using_indexes =1
+ log_throttle_queries_not_using_indexes = 60
+ min_examined_row_limit = 100
+ log_slow_admin_statements = 1
+ log_slow_slave_statements = 1
+ default_authentication_plugin=mysql_native_password
+ skip-name-resolve=1
+ explicit_defaults_for_timestamp=1
+ plugin_dir=/opt/bitnami/mysql/plugin
+ max_allowed_packet=128M
+ max_connections = 2000
+ max_connect_errors = 1000000
+ table_definition_cache=2000
+ table_open_cache_instances=64
+ tablespace_definition_cache=1024
+ thread_cache_size=256
+ interactive_timeout = 600
+ wait_timeout = 600
+ tmpdir=/opt/bitnami/mysql/tmp
+ max_allowed_packet=32M
+ bind-address=0.0.0.0
+ performance_schema = 1
+ performance_schema_instrument = '%memory%=on'
+ performance_schema_instrument = '%lock%=on'
+ innodb_monitor_enable=ALL
+
+ [mysql]
+ no-auto-rehash
+
+ [mysqldump]
+ quick
+ max_allowed_packet = 32M
+
+ [client]
+ port=3306
+ socket=/opt/bitnami/mysql/tmp/mysql.sock
+ default-character-set=UTF8
+ plugin_dir=/opt/bitnami/mysql/plugin
+
+ [manager]
+ port=3306
+ socket=/opt/bitnami/mysql/tmp/mysql.sock
+ pid-file=/opt/bitnami/mysql/tmp/mysqld.pid
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-mysql-init-scripts
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/component: primary
+data:
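+ # These scripts run only on first initialization of the database (mounted at /docker-entrypoint-initdb.d below).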
+ create_users_grants_core.sql: |-
+ create user zyly@'%' identified by 'Cmii@451315';
+ grant select on *.* to zyly@'%';
+ create user zyly_qc@'%' identified by 'Uh)E_owCyb16';
+ grant all on *.* to zyly_qc@'%';
+ create user k8s_admin@'%' identified by 'fP#UaH6qQ3)8';
+ grant all on *.* to k8s_admin@'%';
+ create user audit_dba@'%' identified by 'PjCzqiBmJaTpgkoYXynH';
+ grant all on *.* to audit_dba@'%';
+ create user db_backup@'%' identified by 'RU5Pu(4FGdT9';
+ GRANT SELECT, RELOAD, PROCESS, LOCK TABLES, REPLICATION CLIENT, EVENT on *.* to db_backup@'%';
+ create user monitor@'%' identified by 'PL3#nGtrWbf-';
+ grant REPLICATION CLIENT on *.* to monitor@'%';
+ flush privileges;
+---
+kind: Service
+apiVersion: v1
+metadata:
+ name: cmii-mysql
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/component: primary
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/name: mysql-db
+ app.kubernetes.io/release: ynydapp
+ cmii.app: mysql
+ cmii.type: middleware
+ octopus.control: mysql-db-wdd
+spec:
+ ports:
+ - name: mysql
+ protocol: TCP
+ port: 13306
+ targetPort: mysql
+ selector:
+ app.kubernetes.io/component: primary
+ app.kubernetes.io/name: mysql-db
+ app.kubernetes.io/release: ynydapp
+ cmii.app: mysql
+ cmii.type: middleware
+ type: ClusterIP
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-mysql-headless
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+ annotations: {}
+spec:
+ type: ClusterIP
+ clusterIP: None
+ publishNotReadyAddresses: true
+ ports:
+ - name: mysql
+ port: 3306
+ targetPort: mysql
+ selector:
+ app.kubernetes.io/name: mysql-db
+ app.kubernetes.io/release: ynydapp
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-mysql
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+ annotations: {}
+spec:
+ type: NodePort
+ ports:
+ - name: mysql
+ port: 3306
+ protocol: TCP
+ targetPort: mysql
+ nodePort: 33306
+ selector:
+ app.kubernetes.io/name: mysql-db
+ app.kubernetes.io/release: ynydapp
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+---
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-mysql
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ app.kubernetes.io/name: mysql-db
+ app.kubernetes.io/release: ynydapp
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+ serviceName: helm-mysql
+ updateStrategy:
+ type: RollingUpdate
+ template:
+ metadata:
+ annotations:
+ checksum/configuration: 6b60fa0f3a846a6ada8effdc4f823cf8003d42a8c8f630fe8b1b66d3454082dd
+ labels:
+ app.kubernetes.io/name: mysql-db
+ octopus.control: mysql-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: mysql
+ app.kubernetes.io/component: primary
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ serviceAccountName: helm-mysql
+ affinity: {}
+ nodeSelector:
+ mysql-deploy: "true"
+ securityContext:
+ fsGroup: 1001
+ initContainers:
+ - name: change-volume-permissions
+ image: 192.168.186.11:8033/cmii/bitnami-shell:11-debian-11-r136
+ imagePullPolicy: "Always"
+ command:
+ - /bin/bash
+ - -ec
+ - |
+ chown -R 1001:1001 /bitnami/mysql
+ securityContext:
+ runAsUser: 0
+ volumeMounts:
+ - name: mysql-data
+ mountPath: /bitnami/mysql
+ containers:
+ - name: mysql
+ image: 192.168.186.11:8033/cmii/mysql:8.1.0-debian-11-r42
+ imagePullPolicy: "IfNotPresent"
+ securityContext:
+ runAsUser: 1001
+ env:
+ - name: BITNAMI_DEBUG
+ value: "true"
+ - name: MYSQL_ROOT_PASSWORD
+ valueFrom:
+ secretKeyRef:
+ name: helm-mysql
+ key: mysql-root-password
+ - name: MYSQL_DATABASE
+ value: "cmii"
+ ports:
+ - name: mysql
+ containerPort: 3306
+ livenessProbe:
+ failureThreshold: 5
+ initialDelaySeconds: 120
+ periodSeconds: 10
+ successThreshold: 1
+ timeoutSeconds: 3
+ exec:
+ command:
+ - /bin/bash
+ - -ec
+ - |
+ password_aux="${MYSQL_ROOT_PASSWORD:-}"
+ if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then
+ password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE")
+ fi
+ mysqladmin status -uroot -p"${password_aux}"
+ readinessProbe:
+ failureThreshold: 5
+ initialDelaySeconds: 30
+ periodSeconds: 10
+ successThreshold: 1
+ timeoutSeconds: 3
+ exec:
+ command:
+ - /bin/bash
+ - -ec
+ - |
+ password_aux="${MYSQL_ROOT_PASSWORD:-}"
+ if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then
+ password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE")
+ fi
+ mysqladmin status -uroot -p"${password_aux}"
+ startupProbe:
+ failureThreshold: 60
+ initialDelaySeconds: 120
+ periodSeconds: 10
+ successThreshold: 1
+ timeoutSeconds: 1
+ exec:
+ command:
+ - /bin/bash
+ - -ec
+ - |
+ password_aux="${MYSQL_ROOT_PASSWORD:-}"
+ if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then
+ password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE")
+ fi
+ mysqladmin status -uroot -p"${password_aux}"
+ resources:
+ limits: {}
+ requests: {}
+ volumeMounts:
+ - name: mysql-data
+ mountPath: /bitnami/mysql
+ - name: custom-init-scripts
+ mountPath: /docker-entrypoint-initdb.d
+ - name: config
+ mountPath: /opt/bitnami/mysql/conf/my.cnf
+ subPath: my.cnf
+ volumes:
+ - name: config
+ configMap:
+ name: helm-mysql
+ - name: custom-init-scripts
+ configMap:
+ name: helm-mysql-init-scripts
+ - name: mysql-data
+ hostPath:
+ path: /var/lib/docker/mysql-pv/ynydapp/
diff --git a/agent-common/real_project/ynydapp/k8s-nacos.yaml b/agent-common/real_project/ynydapp/k8s-nacos.yaml
new file mode 100644
index 0000000..81cc675
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-nacos.yaml
@@ -0,0 +1,130 @@
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-nacos-cm
+ namespace: ynydapp
+ labels:
+ cmii.app: helm-nacos
+ cmii.type: middleware
+ octopus.control: nacos-wdd
+ app.kubernetes.io/managed-by: Helm
+ app.kubernetes.io/version: 6.1.0
+data:
+ mysql.db.name: "cmii_nacos_config"
+ mysql.db.host: "helm-mysql"
+ mysql.port: "3306"
+ mysql.user: "k8s_admin"
+ mysql.password: "fP#UaH6qQ3)8"
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-nacos
+ namespace: ynydapp
+ labels:
+ cmii.app: helm-nacos
+ cmii.type: middleware
+ octopus.control: nacos-wdd
+ app.kubernetes.io/managed-by: Helm
+ app.kubernetes.io/version: 6.1.0
+spec:
+ type: NodePort
+ selector:
+ cmii.app: helm-nacos
+ cmii.type: middleware
+ ports:
+ - port: 8848
+ name: server
+ targetPort: 8848
+ nodePort: 38848
+ - port: 9848
+ name: server12
+ targetPort: 9848
+ - port: 9849
+ name: server23
+ targetPort: 9849
+---
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-nacos
+ namespace: ynydapp
+ labels:
+ cmii.app: helm-nacos
+ cmii.type: middleware
+ octopus.control: nacos-wdd
+ app.kubernetes.io/managed-by: Helm
+ app.kubernetes.io/version: 6.1.0
+spec:
+ serviceName: helm-nacos
+ replicas: 1
+ selector:
+ matchLabels:
+ cmii.app: helm-nacos
+ cmii.type: middleware
+ template:
+ metadata:
+ labels:
+ cmii.app: helm-nacos
+ cmii.type: middleware
+ octopus.control: nacos-wdd
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/version: 6.1.0
+ annotations:
+ pod.alpha.kubernetes.io/initialized: "true"
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ affinity: {}
+ containers:
+ - name: nacos-server
+ image: 192.168.186.11:8033/cmii/nacos-server:v2.1.2
+ ports:
+ - containerPort: 8848
+ name: dashboard
+ - containerPort: 9848
+ name: tcp-9848
+ - containerPort: 9849
+ name: tcp-9849
+ env:
+ - name: NACOS_AUTH_ENABLE
+ value: "false"
+ - name: NACOS_REPLICAS
+ value: "1"
+ - name: MYSQL_SERVICE_DB_NAME
+ valueFrom:
+ configMapKeyRef:
+ name: helm-nacos-cm
+ key: mysql.db.name
+ - name: MYSQL_SERVICE_PORT
+ valueFrom:
+ configMapKeyRef:
+ name: helm-nacos-cm
+ key: mysql.port
+ - name: MYSQL_SERVICE_USER
+ valueFrom:
+ configMapKeyRef:
+ name: helm-nacos-cm
+ key: mysql.user
+ - name: MYSQL_SERVICE_PASSWORD
+ valueFrom:
+ configMapKeyRef:
+ name: helm-nacos-cm
+ key: mysql.password
+ - name: MYSQL_SERVICE_HOST
+ valueFrom:
+ configMapKeyRef:
+ name: helm-nacos-cm
+ key: mysql.db.host
+ - name: NACOS_SERVER_PORT
+ value: "8848"
+ - name: NACOS_APPLICATION_PORT
+ value: "8848"
+ - name: PREFER_HOST_MODE
+ value: "hostname"
+ - name: MODE
+ value: standalone
+ - name: SPRING_DATASOURCE_PLATFORM
+ value: mysql
+---
diff --git a/agent-common/real_project/ynydapp/k8s-nfs-test.yaml b/agent-common/real_project/ynydapp/k8s-nfs-test.yaml
new file mode 100644
index 0000000..f892474
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-nfs-test.yaml
@@ -0,0 +1,38 @@
+---
+kind: PersistentVolumeClaim
+apiVersion: v1
+metadata:
+ name: test-claim
+ annotations:
+ volume.beta.kubernetes.io/storage-class: "nfs-prod-distribute" # must match metadata.name in nfs-StorageClass.yaml
+spec:
+ accessModes:
+ - ReadWriteOnce
+ storageClassName: nfs-prod-distribute
+ resources:
+ requests:
+ storage: 1Mi
+---
+kind: Pod
+apiVersion: v1
+metadata:
+ name: test-pod
+spec:
+ imagePullSecrets:
+ - name: harborsecret
+ containers:
+ - name: test-pod
+ image: 192.168.186.11:8033/cmii/busybox:latest
+ command:
+ - "/bin/sh"
+ args:
+ - "-c"
+ - "touch /mnt/NFS-CREATE-SUCCESS && exit 0 || exit 1" #创建一个SUCCESS文件后退出
+ volumeMounts:
+ - name: nfs-pvc
+ mountPath: "/mnt"
+ restartPolicy: "Never"
+ volumes:
+ - name: nfs-pvc
+ persistentVolumeClaim:
+ claimName: test-claim # must match the PVC name
diff --git a/agent-common/real_project/ynydapp/k8s-nfs.yaml b/agent-common/real_project/ynydapp/k8s-nfs.yaml
new file mode 100644
index 0000000..c64acf4
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-nfs.yaml
@@ -0,0 +1,114 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: nfs-client-provisioner
+ # replace with namespace where provisioner is deployed
+ namespace: kube-system # set the namespace according to your environment; the same applies below
+---
+kind: ClusterRole
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: nfs-client-provisioner-runner
+rules:
+ - apiGroups: [""]
+ resources: ["persistentvolumes"]
+ verbs: ["get", "list", "watch", "create", "delete"]
+ - apiGroups: [""]
+ resources: ["persistentvolumeclaims"]
+ verbs: ["get", "list", "watch", "update"]
+ - apiGroups: ["storage.k8s.io"]
+ resources: ["storageclasses"]
+ verbs: ["get", "list", "watch"]
+ - apiGroups: [""]
+ resources: ["events"]
+ verbs: ["create", "update", "patch"]
+---
+kind: ClusterRoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: run-nfs-client-provisioner
+subjects:
+ - kind: ServiceAccount
+ name: nfs-client-provisioner
+ # replace with namespace where provisioner is deployed
+ namespace: kube-system
+roleRef:
+ kind: ClusterRole
+# name: nfs-client-provisioner-runner
+ name: cluster-admin
+ apiGroup: rbac.authorization.k8s.io
+---
+kind: Role
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: leader-locking-nfs-client-provisioner
+ # replace with namespace where provisioner is deployed
+ namespace: kube-system
+rules:
+ - apiGroups: [""]
+ resources: ["endpoints"]
+ verbs: ["get", "list", "watch", "create", "update", "patch"]
+---
+kind: RoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: leader-locking-nfs-client-provisioner
+subjects:
+ - kind: ServiceAccount
+ name: nfs-client-provisioner
+ # replace with namespace where provisioner is deployed
+ namespace: kube-system
+roleRef:
+ kind: Role
+ name: leader-locking-nfs-client-provisioner
+ apiGroup: rbac.authorization.k8s.io
+
+---
+apiVersion: storage.k8s.io/v1
+kind: StorageClass
+metadata:
+ name: nfs-prod-distribute
+provisioner: cmlc-nfs-storage # must match the PROVISIONER_NAME environment variable in the provisioner Deployment
+parameters:
+ archiveOnDelete: "false"
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: nfs-client-provisioner
+ labels:
+ app: nfs-client-provisioner
+ # replace with namespace where provisioner is deployed
+ namespace: kube-system # must match the namespace used in the RBAC manifests
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ app: nfs-client-provisioner
+ strategy:
+ type: Recreate
+ template:
+ metadata:
+ labels:
+ app: nfs-client-provisioner
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ serviceAccountName: nfs-client-provisioner
+ containers:
+ - name: nfs-client-provisioner
+ image: 192.168.186.11:8033/cmii/nfs-subdir-external-provisioner:v4.0.2
+ volumeMounts:
+ - name: nfs-client-root
+ mountPath: /persistentvolumes
+ env:
+ - name: PROVISIONER_NAME
+ value: cmlc-nfs-storage
+ - name: NFS_SERVER
+ value: 192.168.186.15
+ - name: NFS_PATH
+ value: /var/lib/docker/nfs_data
+ volumes:
+ - name: nfs-client-root
+ nfs:
+ server: 192.168.186.15
+ path: /var/lib/docker/nfs_data
diff --git a/agent-common/real_project/ynydapp/k8s-pvc.yaml b/agent-common/real_project/ynydapp/k8s-pvc.yaml
new file mode 100644
index 0000000..9941b09
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-pvc.yaml
@@ -0,0 +1,76 @@
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+ name: nfs-backend-log-pvc
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware-base
+ cmii.app: nfs-backend-log-pvc
+ helm.sh/chart: all-persistence-volume-claims-1.1.0
+ app.kubernetes.io/version: 6.1.0
+spec:
+ storageClassName: nfs-prod-distribute
+ accessModes:
+ - ReadWriteMany
+ volumeMode: Filesystem
+ resources:
+ requests:
+ storage: 100Gi
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+ name: helm-emqxs
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware-base
+ cmii.app: helm-emqxs
+ helm.sh/chart: all-persistence-volume-claims-1.1.0
+ app.kubernetes.io/version: 6.1.0
+spec:
+ storageClassName: nfs-prod-distribute
+ accessModes:
+ - ReadWriteMany
+ volumeMode: Filesystem
+ resources:
+ requests:
+ storage: 20Gi
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+ name: helm-mongo
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware-base
+ cmii.app: helm-mongo
+ helm.sh/chart: all-persistence-volume-claims-1.1.0
+ app.kubernetes.io/version: 6.1.0
+spec:
+ storageClassName: nfs-prod-distribute
+ accessModes:
+ - ReadWriteMany
+ volumeMode: Filesystem
+ resources:
+ requests:
+ storage: 30Gi
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+ name: helm-rabbitmq
+ namespace: ynydapp
+ labels:
+ cmii.type: middleware-base
+ cmii.app: helm-rabbitmq
+ helm.sh/chart: all-persistence-volume-claims-1.1.0
+ app.kubernetes.io/version: 6.1.0
+spec:
+ storageClassName: nfs-prod-distribute
+ accessModes:
+ - ReadWriteMany
+ volumeMode: Filesystem
+ resources:
+ requests:
+ storage: 20Gi
diff --git a/agent-common/real_project/ynydapp/k8s-rabbitmq.yaml b/agent-common/real_project/ynydapp/k8s-rabbitmq.yaml
new file mode 100644
index 0000000..7a49420
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-rabbitmq.yaml
@@ -0,0 +1,328 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: helm-rabbitmq
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+automountServiceAccountToken: true
+secrets:
+ - name: helm-rabbitmq
+---
+apiVersion: v1
+kind: Secret
+metadata:
+ name: helm-rabbitmq
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+type: Opaque
+data:
+ rabbitmq-password: "blljUk45MXIuX2hq"
+ rabbitmq-erlang-cookie: "emFBRmt1ZU1xMkJieXZvdHRYbWpoWk52UThuVXFzcTU="
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-rabbitmq-config
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+data:
+ rabbitmq.conf: |-
+ ## Username and password
+ ##
+ default_user = admin
+ default_pass = nYcRN91r._hj
+ ## Clustering
+ ##
+ cluster_formation.peer_discovery_backend = rabbit_peer_discovery_k8s
+ cluster_formation.k8s.host = kubernetes.default.svc.cluster.local
+ cluster_formation.node_cleanup.interval = 10
+ cluster_formation.node_cleanup.only_log_warning = true
+ cluster_partition_handling = autoheal
+ # queue master locator
+ queue_master_locator = min-masters
+ # enable guest user
+ loopback_users.guest = false
+ #default_vhost = default-vhost
+ #disk_free_limit.absolute = 50MB
+ #load_definitions = /app/load_definition.json
+---
+kind: Role
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: helm-rabbitmq-endpoint-reader
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+rules:
+ - apiGroups: [""]
+ resources: ["endpoints"]
+ verbs: ["get"]
+ - apiGroups: [""]
+ resources: ["events"]
+ verbs: ["create"]
+---
+kind: RoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+ name: helm-rabbitmq-endpoint-reader
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+subjects:
+ - kind: ServiceAccount
+ name: helm-rabbitmq
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: Role
+ name: helm-rabbitmq-endpoint-reader
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-rabbitmq-headless
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+spec:
+ clusterIP: None
+ ports:
+ - name: epmd
+ port: 4369
+ targetPort: epmd
+ - name: amqp
+ port: 5672
+ targetPort: amqp
+ - name: dist
+ port: 25672
+ targetPort: dist
+ - name: dashboard
+ port: 15672
+ targetPort: stats
+ selector:
+ app.kubernetes.io/name: helm-rabbitmq
+ app.kubernetes.io/release: ynydapp
+ publishNotReadyAddresses: true
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-rabbitmq
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+spec:
+ type: NodePort
+ ports:
+ - name: amqp
+ port: 5672
+ targetPort: amqp
+ nodePort: 35672
+ - name: dashboard
+ port: 15672
+ targetPort: dashboard
+ nodePort: 36675
+ selector:
+ app.kubernetes.io/name: helm-rabbitmq
+ app.kubernetes.io/release: ynydapp
+---
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-rabbitmq
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+spec:
+ serviceName: helm-rabbitmq-headless
+ podManagementPolicy: OrderedReady
+ replicas: 1
+ updateStrategy:
+ type: RollingUpdate
+ selector:
+ matchLabels:
+ app.kubernetes.io/name: helm-rabbitmq
+ app.kubernetes.io/release: ynydapp
+ template:
+ metadata:
+ labels:
+ app.kubernetes.io/name: helm-rabbitmq
+ helm.sh/chart: rabbitmq-8.26.1
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: rabbitmq
+ annotations:
+ checksum/config: d6c2caa9572f64a06d9f7daa34c664a186b4778cd1697ef8e59663152fc628f1
+ checksum/secret: d764e7b3d999e7324d1afdfec6140092a612f04b6e0306818675815cec2f454f
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ serviceAccountName: helm-rabbitmq
+ affinity: {}
+ securityContext:
+ fsGroup: 5001
+ runAsUser: 5001
+ terminationGracePeriodSeconds: 120
+ initContainers:
+ - name: volume-permissions
+ image: 192.168.186.11:8033/cmii/bitnami-shell:11-debian-11-r136
+ imagePullPolicy: "Always"
+ command:
+ - /bin/bash
+ args:
+ - -ec
+ - |
+ mkdir -p "/bitnami/rabbitmq/mnesia"
+ chown -R "5001:5001" "/bitnami/rabbitmq/mnesia"
+ securityContext:
+ runAsUser: 0
+ resources:
+ limits: {}
+ requests: {}
+ volumeMounts:
+ - name: data
+ mountPath: /bitnami/rabbitmq/mnesia
+ containers:
+ - name: rabbitmq
+ image: 192.168.186.11:8033/cmii/rabbitmq:3.9.12-debian-10-r3
+ imagePullPolicy: "Always"
+ env:
+ - name: BITNAMI_DEBUG
+ value: "false"
+ - name: MY_POD_IP
+ valueFrom:
+ fieldRef:
+ fieldPath: status.podIP
+ - name: MY_POD_NAME
+ valueFrom:
+ fieldRef:
+ fieldPath: metadata.name
+ - name: MY_POD_NAMESPACE
+ valueFrom:
+ fieldRef:
+ fieldPath: metadata.namespace
+ - name: K8S_SERVICE_NAME
+ value: "helm-rabbitmq-headless"
+ - name: K8S_ADDRESS_TYPE
+ value: hostname
+ - name: RABBITMQ_FORCE_BOOT
+ value: "no"
+ - name: RABBITMQ_NODE_NAME
+ value: "rabbit@$(MY_POD_NAME).$(K8S_SERVICE_NAME).$(MY_POD_NAMESPACE).svc.cluster.local"
+ - name: K8S_HOSTNAME_SUFFIX
+ value: ".$(K8S_SERVICE_NAME).$(MY_POD_NAMESPACE).svc.cluster.local"
+ - name: RABBITMQ_MNESIA_DIR
+ value: "/bitnami/rabbitmq/mnesia/$(RABBITMQ_NODE_NAME)"
+ - name: RABBITMQ_LDAP_ENABLE
+ value: "no"
+ - name: RABBITMQ_LOGS
+ value: "-"
+ - name: RABBITMQ_ULIMIT_NOFILES
+ value: "65536"
+ - name: RABBITMQ_USE_LONGNAME
+ value: "true"
+ - name: RABBITMQ_ERL_COOKIE
+ valueFrom:
+ secretKeyRef:
+ name: helm-rabbitmq
+ key: rabbitmq-erlang-cookie
+ - name: RABBITMQ_LOAD_DEFINITIONS
+ value: "no"
+ - name: RABBITMQ_SECURE_PASSWORD
+ value: "yes"
+ - name: RABBITMQ_USERNAME
+ value: "admin"
+ - name: RABBITMQ_PASSWORD
+ valueFrom:
+ secretKeyRef:
+ name: helm-rabbitmq
+ key: rabbitmq-password
+ - name: RABBITMQ_PLUGINS
+ value: "rabbitmq_management, rabbitmq_peer_discovery_k8s, rabbitmq_shovel, rabbitmq_shovel_management, rabbitmq_auth_backend_ldap"
+ ports:
+ - name: amqp
+ containerPort: 5672
+ - name: dist
+ containerPort: 25672
+ - name: dashboard
+ containerPort: 15672
+ - name: epmd
+ containerPort: 4369
+ livenessProbe:
+ exec:
+ command:
+ - /bin/bash
+ - -ec
+ - rabbitmq-diagnostics -q ping
+ initialDelaySeconds: 120
+ periodSeconds: 30
+ timeoutSeconds: 20
+ successThreshold: 1
+ failureThreshold: 6
+ readinessProbe:
+ exec:
+ command:
+ - /bin/bash
+ - -ec
+ - rabbitmq-diagnostics -q check_running && rabbitmq-diagnostics -q check_local_alarms
+ initialDelaySeconds: 10
+ periodSeconds: 30
+ timeoutSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ lifecycle:
+ preStop:
+ exec:
+ command:
+ - /bin/bash
+ - -ec
+ - |
+ if [[ -f /opt/bitnami/scripts/rabbitmq/nodeshutdown.sh ]]; then
+ /opt/bitnami/scripts/rabbitmq/nodeshutdown.sh -t "120" -d "false"
+ else
+ rabbitmqctl stop_app
+ fi
+ resources:
+ limits: {}
+ requests: {}
+ volumeMounts:
+ - name: configuration
+ mountPath: /bitnami/rabbitmq/conf
+ - name: data
+ mountPath: /bitnami/rabbitmq/mnesia
+ volumes:
+ - name: configuration
+ configMap:
+ name: helm-rabbitmq-config
+ items:
+ - key: rabbitmq.conf
+ path: rabbitmq.conf
+ - name: data
+ persistentVolumeClaim:
+ claimName: helm-rabbitmq
diff --git a/agent-common/real_project/ynydapp/k8s-redis.yaml b/agent-common/real_project/ynydapp/k8s-redis.yaml
new file mode 100644
index 0000000..32ce1f2
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-redis.yaml
@@ -0,0 +1,585 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+automountServiceAccountToken: true
+metadata:
+ name: helm-redis
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+---
+apiVersion: v1
+kind: Secret
+metadata:
+ name: helm-redis
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+type: Opaque
+data:
+ redis-password: "TWNhY2hlQDQ1MjI="
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-redis-configuration
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+data:
+ redis.conf: |-
+ # User-supplied common configuration:
+ # Enable AOF https://redis.io/topics/persistence#append-only-file
+ appendonly yes
+ # Disable RDB persistence, AOF persistence already enabled.
+ save ""
+ # End of common configuration
+ master.conf: |-
+ dir /data
+ # User-supplied master configuration:
+ rename-command FLUSHDB ""
+ rename-command FLUSHALL ""
+ # End of master configuration
+ replica.conf: |-
+ dir /data
+ slave-read-only yes
+ # User-supplied replica configuration:
+ rename-command FLUSHDB ""
+ rename-command FLUSHALL ""
+ # End of replica configuration
+---
+# Source: outside-deploy/charts/redis-db/templates/health-configmap.yaml
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-redis-health
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+data:
+ ping_readiness_local.sh: |-
+ #!/bin/bash
+
+ [[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
+ [[ -n "$REDIS_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_PASSWORD"
+ response=$(
+ timeout -s 3 $1 \
+ redis-cli \
+ -h localhost \
+ -p $REDIS_PORT \
+ ping
+ )
+ if [ "$response" != "PONG" ]; then
+ echo "$response"
+ exit 1
+ fi
+ ping_liveness_local.sh: |-
+ #!/bin/bash
+
+ [[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
+ [[ -n "$REDIS_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_PASSWORD"
+ response=$(
+ timeout -s 3 $1 \
+ redis-cli \
+ -h localhost \
+ -p $REDIS_PORT \
+ ping
+ )
+ if [ "$response" != "PONG" ] && [ "$response" != "LOADING Redis is loading the dataset in memory" ]; then
+ echo "$response"
+ exit 1
+ fi
+ ping_readiness_master.sh: |-
+ #!/bin/bash
+
+ [[ -f $REDIS_MASTER_PASSWORD_FILE ]] && export REDIS_MASTER_PASSWORD="$(< "${REDIS_MASTER_PASSWORD_FILE}")"
+ [[ -n "$REDIS_MASTER_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_MASTER_PASSWORD"
+ response=$(
+ timeout -s 3 $1 \
+ redis-cli \
+ -h $REDIS_MASTER_HOST \
+ -p $REDIS_MASTER_PORT_NUMBER \
+ ping
+ )
+ if [ "$response" != "PONG" ]; then
+ echo "$response"
+ exit 1
+ fi
+ ping_liveness_master.sh: |-
+ #!/bin/bash
+
+ [[ -f $REDIS_MASTER_PASSWORD_FILE ]] && export REDIS_MASTER_PASSWORD="$(< "${REDIS_MASTER_PASSWORD_FILE}")"
+ [[ -n "$REDIS_MASTER_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_MASTER_PASSWORD"
+ response=$(
+ timeout -s 3 $1 \
+ redis-cli \
+ -h $REDIS_MASTER_HOST \
+ -p $REDIS_MASTER_PORT_NUMBER \
+ ping
+ )
+ if [ "$response" != "PONG" ] && [ "$response" != "LOADING Redis is loading the dataset in memory" ]; then
+ echo "$response"
+ exit 1
+ fi
+ ping_readiness_local_and_master.sh: |-
+ script_dir="$(dirname "$0")"
+ exit_status=0
+ "$script_dir/ping_readiness_local.sh" $1 || exit_status=$?
+ "$script_dir/ping_readiness_master.sh" $1 || exit_status=$?
+ exit $exit_status
+ ping_liveness_local_and_master.sh: |-
+ script_dir="$(dirname "$0")"
+ exit_status=0
+ "$script_dir/ping_liveness_local.sh" $1 || exit_status=$?
+ "$script_dir/ping_liveness_master.sh" $1 || exit_status=$?
+ exit $exit_status
+---
+# Source: outside-deploy/charts/redis-db/templates/scripts-configmap.yaml
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: helm-redis-scripts
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+data:
+ start-master.sh: |
+ #!/bin/bash
+
+ [[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
+ if [[ ! -f /opt/bitnami/redis/etc/master.conf ]];then
+ cp /opt/bitnami/redis/mounted-etc/master.conf /opt/bitnami/redis/etc/master.conf
+ fi
+ if [[ ! -f /opt/bitnami/redis/etc/redis.conf ]];then
+ cp /opt/bitnami/redis/mounted-etc/redis.conf /opt/bitnami/redis/etc/redis.conf
+ fi
+ ARGS=("--port" "${REDIS_PORT}")
+ ARGS+=("--requirepass" "${REDIS_PASSWORD}")
+ ARGS+=("--masterauth" "${REDIS_PASSWORD}")
+ ARGS+=("--include" "/opt/bitnami/redis/etc/redis.conf")
+ ARGS+=("--include" "/opt/bitnami/redis/etc/master.conf")
+ exec redis-server "${ARGS[@]}"
+ start-replica.sh: |
+ #!/bin/bash
+
+ get_port() {
+ hostname="$1"
+ type="$2"
+
+ port_var=$(echo "${hostname^^}_SERVICE_PORT_$type" | sed "s/-/_/g")
+ port=${!port_var}
+
+ if [ -z "$port" ]; then
+ case $type in
+ "SENTINEL")
+ echo 26379
+ ;;
+ "REDIS")
+ echo 6379
+ ;;
+ esac
+ else
+ echo $port
+ fi
+ }
+
+ get_full_hostname() {
+ hostname="$1"
+ echo "${hostname}.${HEADLESS_SERVICE}"
+ }
+
+ REDISPORT=$(get_port "$HOSTNAME" "REDIS")
+
+ [[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
+ [[ -f $REDIS_MASTER_PASSWORD_FILE ]] && export REDIS_MASTER_PASSWORD="$(< "${REDIS_MASTER_PASSWORD_FILE}")"
+ if [[ ! -f /opt/bitnami/redis/etc/replica.conf ]];then
+ cp /opt/bitnami/redis/mounted-etc/replica.conf /opt/bitnami/redis/etc/replica.conf
+ fi
+ if [[ ! -f /opt/bitnami/redis/etc/redis.conf ]];then
+ cp /opt/bitnami/redis/mounted-etc/redis.conf /opt/bitnami/redis/etc/redis.conf
+ fi
+
+ echo "" >> /opt/bitnami/redis/etc/replica.conf
+ echo "replica-announce-port $REDISPORT" >> /opt/bitnami/redis/etc/replica.conf
+ echo "replica-announce-ip $(get_full_hostname "$HOSTNAME")" >> /opt/bitnami/redis/etc/replica.conf
+ ARGS=("--port" "${REDIS_PORT}")
+ ARGS+=("--slaveof" "${REDIS_MASTER_HOST}" "${REDIS_MASTER_PORT_NUMBER}")
+ ARGS+=("--requirepass" "${REDIS_PASSWORD}")
+ ARGS+=("--masterauth" "${REDIS_MASTER_PASSWORD}")
+ ARGS+=("--include" "/opt/bitnami/redis/etc/redis.conf")
+ ARGS+=("--include" "/opt/bitnami/redis/etc/replica.conf")
+ exec redis-server "${ARGS[@]}"
+---
+# Source: outside-deploy/charts/redis-db/templates/headless-svc.yaml
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-redis-headless
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+spec:
+ type: ClusterIP
+ clusterIP: None
+ ports:
+ - name: tcp-redis
+ port: 6379
+ targetPort: redis
+ selector:
+ app.kubernetes.io/name: redis-db
+ app.kubernetes.io/release: ynydapp
+---
+# Source: outside-deploy/charts/redis-db/templates/master/service.yaml
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-redis-master
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: redis
+ app.kubernetes.io/component: master
+spec:
+ type: ClusterIP
+
+ ports:
+ - name: tcp-redis
+ port: 6379
+ targetPort: redis
+ nodePort: null
+ selector:
+ app.kubernetes.io/name: redis-db
+ app.kubernetes.io/release: ynydapp
+ cmii.type: middleware
+ cmii.app: redis
+ app.kubernetes.io/component: master
+---
+# Source: outside-deploy/charts/redis-db/templates/replicas/service.yaml
+apiVersion: v1
+kind: Service
+metadata:
+ name: helm-redis-replicas
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/component: replica
+spec:
+ type: ClusterIP
+ ports:
+ - name: tcp-redis
+ port: 6379
+ targetPort: redis
+ nodePort: null
+ selector:
+ app.kubernetes.io/name: redis-db
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/component: replica
+---
+# Source: outside-deploy/charts/redis-db/templates/master/statefulset.yaml
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-redis-master
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: redis
+ app.kubernetes.io/component: master
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ app.kubernetes.io/name: redis-db
+ app.kubernetes.io/release: ynydapp
+ cmii.type: middleware
+ cmii.app: redis
+ app.kubernetes.io/component: master
+ serviceName: helm-redis-headless
+ updateStrategy:
+ rollingUpdate: {}
+ type: RollingUpdate
+ template:
+ metadata:
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ cmii.type: middleware
+ cmii.app: redis
+ app.kubernetes.io/component: master
+ annotations:
+ checksum/configmap: b64aa5db67e6e63811f3c1095b9fce34d83c86a471fccdda0e48eedb53a179b0
+ checksum/health: 6e0a6330e5ac63e565ae92af1444527d72d8897f91266f333555b3d323570623
+ checksum/scripts: b88df93710b7c42a76006e20218f05c6e500e6cc2affd4bb1985832f03166e98
+ checksum/secret: 43f1b0e20f9cb2de936bd182bc3683b720fc3cf4f4e76cb23c06a52398a50e8d
+ spec:
+ affinity: {}
+ securityContext:
+ fsGroup: 1001
+ serviceAccountName: helm-redis
+ imagePullSecrets:
+ - name: harborsecret
+ terminationGracePeriodSeconds: 30
+ containers:
+ - name: redis
+ image: 192.168.186.11:8033/cmii/redis:6.2.6-debian-10-r0
+ imagePullPolicy: "Always"
+ securityContext:
+ runAsUser: 1001
+ command:
+ - /bin/bash
+ args:
+ - -c
+ - /opt/bitnami/scripts/start-scripts/start-master.sh
+ env:
+ - name: BITNAMI_DEBUG
+ value: "false"
+ - name: REDIS_REPLICATION_MODE
+ value: master
+ - name: ALLOW_EMPTY_PASSWORD
+ value: "no"
+ - name: REDIS_PASSWORD
+ valueFrom:
+ secretKeyRef:
+ name: helm-redis
+ key: redis-password
+ - name: REDIS_TLS_ENABLED
+ value: "no"
+ - name: REDIS_PORT
+ value: "6379"
+ ports:
+ - name: redis
+ containerPort: 6379
+ livenessProbe:
+ initialDelaySeconds: 20
+ periodSeconds: 5
+ # One second longer than command timeout should prevent generation of zombie processes.
+ timeoutSeconds: 6
+ successThreshold: 1
+ failureThreshold: 5
+ exec:
+ command:
+ - sh
+ - -c
+ - /health/ping_liveness_local.sh 5
+ readinessProbe:
+ initialDelaySeconds: 20
+ periodSeconds: 5
+ timeoutSeconds: 2
+ successThreshold: 1
+ failureThreshold: 5
+ exec:
+ command:
+ - sh
+ - -c
+ - /health/ping_readiness_local.sh 1
+ resources:
+ limits:
+ cpu: "2"
+ memory: 8Gi
+ requests:
+ cpu: "2"
+ memory: 8Gi
+ volumeMounts:
+ - name: start-scripts
+ mountPath: /opt/bitnami/scripts/start-scripts
+ - name: health
+ mountPath: /health
+ - name: redis-data
+ mountPath: /data
+ subPath:
+ - name: config
+ mountPath: /opt/bitnami/redis/mounted-etc
+ - name: redis-tmp-conf
+ mountPath: /opt/bitnami/redis/etc/
+ - name: tmp
+ mountPath: /tmp
+ volumes:
+ - name: start-scripts
+ configMap:
+ name: helm-redis-scripts
+ defaultMode: 0755
+ - name: health
+ configMap:
+ name: helm-redis-health
+ defaultMode: 0755
+ - name: config
+ configMap:
+ name: helm-redis-configuration
+ - name: redis-tmp-conf
+ emptyDir: {}
+ - name: tmp
+ emptyDir: {}
+ - name: redis-data
+ emptyDir: {}
+---
+# Source: outside-deploy/charts/redis-db/templates/replicas/statefulset.yaml
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: helm-redis-replicas
+ namespace: ynydapp
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/component: replica
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ app.kubernetes.io/name: redis-db
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/component: replica
+ serviceName: helm-redis-headless
+ updateStrategy:
+ rollingUpdate: {}
+ type: RollingUpdate
+ template:
+ metadata:
+ labels:
+ app.kubernetes.io/name: redis-db
+ octopus.control: redis-db-wdd
+ app.kubernetes.io/release: ynydapp
+ app.kubernetes.io/managed-by: octopus
+ app.kubernetes.io/component: replica
+ annotations:
+ checksum/configmap: b64aa5db67e6e63811f3c1095b9fce34d83c86a471fccdda0e48eedb53a179b0
+ checksum/health: 6e0a6330e5ac63e565ae92af1444527d72d8897f91266f333555b3d323570623
+ checksum/scripts: b88df93710b7c42a76006e20218f05c6e500e6cc2affd4bb1985832f03166e98
+ checksum/secret: 43f1b0e20f9cb2de936bd182bc3683b720fc3cf4f4e76cb23c06a52398a50e8d
+ spec:
+ imagePullSecrets:
+ - name: harborsecret
+ securityContext:
+ fsGroup: 1001
+ serviceAccountName: helm-redis
+ terminationGracePeriodSeconds: 30
+ containers:
+ - name: redis
+ image: 192.168.186.11:8033/cmii/redis:6.2.6-debian-10-r0
+ imagePullPolicy: "Always"
+ securityContext:
+ runAsUser: 1001
+ command:
+ - /bin/bash
+ args:
+ - -c
+ - /opt/bitnami/scripts/start-scripts/start-replica.sh
+ env:
+ - name: BITNAMI_DEBUG
+ value: "false"
+ - name: REDIS_REPLICATION_MODE
+ value: slave
+ - name: REDIS_MASTER_HOST
+ value: helm-redis-master-0.helm-redis-headless.ynydapp.svc.cluster.local
+ - name: REDIS_MASTER_PORT_NUMBER
+ value: "6379"
+ - name: ALLOW_EMPTY_PASSWORD
+ value: "no"
+ - name: REDIS_PASSWORD
+ valueFrom:
+ secretKeyRef:
+ name: helm-redis
+ key: redis-password
+ - name: REDIS_MASTER_PASSWORD
+ valueFrom:
+ secretKeyRef:
+ name: helm-redis
+ key: redis-password
+ - name: REDIS_TLS_ENABLED
+ value: "no"
+ - name: REDIS_PORT
+ value: "6379"
+ ports:
+ - name: redis
+ containerPort: 6379
+ livenessProbe:
+ initialDelaySeconds: 20
+ periodSeconds: 5
+ timeoutSeconds: 6
+ successThreshold: 1
+ failureThreshold: 5
+ exec:
+ command:
+ - sh
+ - -c
+ - /health/ping_liveness_local_and_master.sh 5
+ readinessProbe:
+ initialDelaySeconds: 20
+ periodSeconds: 5
+ timeoutSeconds: 2
+ successThreshold: 1
+ failureThreshold: 5
+ exec:
+ command:
+ - sh
+ - -c
+ - /health/ping_readiness_local_and_master.sh 1
+ resources:
+ limits:
+ cpu: "2"
+ memory: 8Gi
+ requests:
+ cpu: "2"
+ memory: 8Gi
+ volumeMounts:
+ - name: start-scripts
+ mountPath: /opt/bitnami/scripts/start-scripts
+ - name: health
+ mountPath: /health
+ - name: redis-data
+ mountPath: /data
+ subPath:
+ - name: config
+ mountPath: /opt/bitnami/redis/mounted-etc
+ - name: redis-tmp-conf
+ mountPath: /opt/bitnami/redis/etc
+ volumes:
+ - name: start-scripts
+ configMap:
+ name: helm-redis-scripts
+ defaultMode: 0755
+ - name: health
+ configMap:
+ name: helm-redis-health
+ defaultMode: 0755
+ - name: config
+ configMap:
+ name: helm-redis-configuration
+ - name: redis-tmp-conf
+ emptyDir: {}
+ - name: redis-data
+ emptyDir: {}
+
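For a quick sanity check of the Redis topology rendered above, a minimal Go sketch could connect to the master through the headless-service name used in REDIS_MASTER_HOST and ask for its replication state. This is illustrative only: the github.com/redis/go-redis/v9 client and the REDIS_PASSWORD environment variable are assumptions, not part of this repo; the real password lives in the helm-redis secret.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/redis/go-redis/v9"
)

func main() {
	ctx := context.Background()

	// Master address as rendered in REDIS_MASTER_HOST above; the password is
	// assumed to be injected from the helm-redis secret into REDIS_PASSWORD.
	rdb := redis.NewClient(&redis.Options{
		Addr:     "helm-redis-master-0.helm-redis-headless.ynydapp.svc.cluster.local:6379",
		Password: os.Getenv("REDIS_PASSWORD"),
	})
	defer rdb.Close()

	if err := rdb.Ping(ctx).Err(); err != nil {
		log.Fatalf("master not reachable: %v", err)
	}

	// INFO replication should report role:master and connected_slaves:1
	// once the helm-redis-replicas pod has attached.
	info, err := rdb.Info(ctx, "replication").Result()
	if err != nil {
		log.Fatalf("INFO replication failed: %v", err)
	}
	fmt.Println(info)
}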
diff --git a/agent-common/real_project/ynydapp/k8s-srs.yaml b/agent-common/real_project/ynydapp/k8s-srs.yaml
new file mode 100644
index 0000000..ec61936
--- /dev/null
+++ b/agent-common/real_project/ynydapp/k8s-srs.yaml
@@ -0,0 +1,496 @@
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: helm-live-srs-cm
+ namespace: ynydapp
+ labels:
+ cmii.app: live-srs
+ cmii.type: live
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+ helm.sh/chart: cmlc-live-srs-rtc-2.0.0
+data:
+ srs.rtc.conf: |-
+ listen 31935;
+ max_connections 4096;
+ srs_log_tank console;
+ srs_log_level info;
+ srs_log_file /home/srs.log;
+ daemon off;
+ http_api {
+ enabled on;
+ listen 1985;
+ crossdomain on;
+ }
+ stats {
+ network 0;
+ }
+ http_server {
+ enabled on;
+ listen 8080;
+ dir /home/hls;
+ }
+ srt_server {
+ enabled on;
+ listen 30556;
+ maxbw 1000000000;
+ connect_timeout 4000;
+ peerlatency 600;
+ recvlatency 600;
+ }
+ rtc_server {
+ enabled on;
+ listen 30090;
+ candidate $CANDIDATE;
+ }
+ vhost __defaultVhost__ {
+ http_hooks {
+ enabled on;
+ on_publish http://helm-live-op-svc-v2:8080/hooks/on_push;
+ }
+ http_remux {
+ enabled on;
+ }
+ rtc {
+ enabled on;
+ rtmp_to_rtc on;
+ rtc_to_rtmp on;
+ keep_bframe off;
+ }
+ tcp_nodelay on;
+ min_latency on;
+ play {
+ gop_cache off;
+ mw_latency 100;
+ mw_msgs 10;
+ }
+ publish {
+ firstpkt_timeout 8000;
+ normal_timeout 4000;
+ mr on;
+ }
+ dvr {
+ enabled off;
+ dvr_path /home/dvr/[app]/[stream]/[2006][01]/[timestamp].mp4;
+ dvr_plan session;
+ }
+ hls {
+ enabled on;
+ hls_path /home/hls;
+ hls_fragment 10;
+ hls_window 60;
+ hls_m3u8_file [app]/[stream].m3u8;
+ hls_ts_file [app]/[stream]/[2006][01][02]/[timestamp]-[duration].ts;
+ hls_cleanup on;
+ hls_entry_prefix http://39.129.174.66:8088;
+ }
+ }
+---
+kind: Service
+apiVersion: v1
+metadata:
+ name: helm-live-srs-svc-exporter
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+spec:
+ ports:
+ - name: rtmp
+ protocol: TCP
+ port: 31935
+ targetPort: 31935
+ nodePort: 31935
+ - name: rtc
+ protocol: UDP
+ port: 30090
+ targetPort: 30090
+ nodePort: 30090
+ - name: rtc-tcp
+ protocol: TCP
+ port: 30090
+ targetPort: 30090
+ nodePort: 30090
+ - name: srt
+ protocol: UDP
+ port: 30556
+ targetPort: 30556
+ nodePort: 30556
+ - name: api
+ protocol: TCP
+ port: 1985
+ targetPort: 1985
+ nodePort: 30080
+ selector:
+ srs-role: rtc
+ type: NodePort
+ sessionAffinity: None
+ externalTrafficPolicy: Cluster
+
+---
+kind: Service
+apiVersion: v1
+metadata:
+ name: helm-live-srs-svc
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+spec:
+ ports:
+ - name: http
+ protocol: TCP
+ port: 8080
+ targetPort: 8080
+ - name: api
+ protocol: TCP
+ port: 1985
+ targetPort: 1985
+ selector:
+ srs-role: rtc
+ type: ClusterIP
+ sessionAffinity: None
+
+---
+kind: Service
+apiVersion: v1
+metadata:
+ name: helm-live-srsrtc-svc
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+spec:
+ ports:
+ - name: rtmp
+ protocol: TCP
+ port: 31935
+ targetPort: 31935
+ selector:
+ srs-role: rtc
+ type: ClusterIP
+ sessionAffinity: None
+
+---
+kind: StatefulSet
+apiVersion: apps/v1
+metadata:
+ name: helm-live-srs-rtc
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+ cmii.app: live-srs
+ cmii.type: live
+ helm.sh/chart: cmlc-live-srs-rtc-2.0.0
+ srs-role: rtc
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ srs-role: rtc
+ template:
+ metadata:
+ labels:
+ srs-role: rtc
+ spec:
+ volumes:
+ - name: srs-conf-file
+ configMap:
+ name: helm-live-srs-cm
+ items:
+ - key: srs.rtc.conf
+ path: docker.conf
+ defaultMode: 420
+ - name: srs-vol
+ emptyDir:
+ sizeLimit: 8Gi
+ containers:
+ - name: srs-rtc
+ image: 192.168.186.11:8033/cmii/srs:v5.0.195
+ ports:
+ - name: srs-rtmp
+ containerPort: 31935
+ protocol: TCP
+ - name: srs-api
+ containerPort: 1985
+ protocol: TCP
+ - name: srs-flv
+ containerPort: 8080
+ protocol: TCP
+ - name: srs-webrtc
+ containerPort: 30090
+ protocol: UDP
+ - name: srs-webrtc-tcp
+ containerPort: 30090
+ protocol: TCP
+ - name: srs-srt
+ containerPort: 30556
+ protocol: UDP
+ env:
+ - name: CANDIDATE
+ value: 39.129.174.66
+ resources:
+ limits:
+ cpu: 2000m
+ memory: 4Gi
+ requests:
+ cpu: 100m
+ memory: 256Mi
+ volumeMounts:
+ - name: srs-conf-file
+ mountPath: /usr/local/srs/conf/docker.conf
+ subPath: docker.conf
+ - name: srs-vol
+ mountPath: /home/dvr
+ subPath: ynydapp/helm-live/dvr
+ - name: srs-vol
+ mountPath: /home/hls
+ subPath: ynydapp/helm-live/hls
+ terminationMessagePath: /dev/termination-log
+ terminationMessagePolicy: File
+ imagePullPolicy: Always
+ - name: oss-adaptor
+ image: 192.168.186.11:8033/cmii/cmii-srs-oss-adaptor:2023-SA
+ env:
+ - name: OSS_ENDPOINT
+ value: 'http://192.168.186.15:9000'
+ - name: OSS_AK
+ value: cmii
+ - name: OSS_SK
+ value: 'B#923fC7mk'
+ - name: OSS_BUCKET
+ value: live-cluster-hls
+ - name: SRS_OP
+ value: 'http://helm-live-op-svc-v2:8080'
+ - name: MYSQL_ENDPOINT
+ value: 'helm-mysql:3306'
+ - name: MYSQL_USERNAME
+ value: k8s_admin
+ - name: MYSQL_PASSWORD
+ value: fP#UaH6qQ3)8
+ - name: MYSQL_DATABASE
+ value: cmii_live_srs_op
+ - name: MYSQL_TABLE
+ value: live_segment
+ - name: LOG_LEVEL
+ value: info
+ - name: OSS_META
+ value: 'yes'
+ resources:
+ limits:
+ cpu: 2000m
+ memory: 4Gi
+ requests:
+ cpu: 100m
+ memory: 256Mi
+ volumeMounts:
+ - name: srs-vol
+ mountPath: /cmii/share/hls
+ subPath: ynydapp/helm-live/hls
+ terminationMessagePath: /dev/termination-log
+ terminationMessagePolicy: File
+ imagePullPolicy: Always
+ restartPolicy: Always
+ terminationGracePeriodSeconds: 30
+ dnsPolicy: ClusterFirst
+ securityContext: {}
+ imagePullSecrets:
+ - name: harborsecret
+ affinity: {}
+ schedulerName: default-scheduler
+ serviceName: helm-live-srsrtc-svc
+ podManagementPolicy: OrderedReady
+ updateStrategy:
+ type: RollingUpdate
+ rollingUpdate:
+ partition: 0
+ revisionHistoryLimit: 10
+---
+# live-srs section
+---
+kind: Deployment
+apiVersion: apps/v1
+metadata:
+ name: helm-live-op-v2
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+ cmii.app: live-engine
+ cmii.type: live
+ helm.sh/chart: cmlc-live-live-op-2.0.0
+ live-role: op-v2
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ live-role: op-v2
+ template:
+ metadata:
+ labels:
+ live-role: op-v2
+ spec:
+ volumes:
+ - name: srs-conf-file
+ configMap:
+ name: helm-live-op-cm-v2
+ items:
+ - key: live.op.conf
+ path: bootstrap.yaml
+ defaultMode: 420
+ containers:
+ - name: helm-live-op-v2
+ image: 192.168.186.11:8033/cmii/cmii-live-operator:5.2.0
+ ports:
+ - name: operator
+ containerPort: 8080
+ protocol: TCP
+ resources:
+ limits:
+ cpu: 4800m
+ memory: 4Gi
+ requests:
+ cpu: 100m
+ memory: 256Mi
+ volumeMounts:
+ - name: srs-conf-file
+ mountPath: /cmii/bootstrap.yaml
+ subPath: bootstrap.yaml
+ livenessProbe:
+ httpGet:
+ path: /cmii/health
+ port: 8080
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ readinessProbe:
+ httpGet:
+ path: /cmii/health
+ port: 8080
+ scheme: HTTP
+ initialDelaySeconds: 60
+ timeoutSeconds: 5
+ periodSeconds: 20
+ successThreshold: 1
+ failureThreshold: 3
+ terminationMessagePath: /dev/termination-log
+ terminationMessagePolicy: File
+ imagePullPolicy: Always
+ restartPolicy: Always
+ terminationGracePeriodSeconds: 30
+ dnsPolicy: ClusterFirst
+ securityContext: {}
+ imagePullSecrets:
+ - name: harborsecret
+ affinity: {}
+ schedulerName: default-scheduler
+ strategy:
+ type: RollingUpdate
+ rollingUpdate:
+ maxUnavailable: 25%
+ maxSurge: 25%
+ revisionHistoryLimit: 10
+ progressDeadlineSeconds: 600
+---
+kind: Service
+apiVersion: v1
+metadata:
+ name: helm-live-op-svc-v2
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+spec:
+ ports:
+ - protocol: TCP
+ port: 8080
+ targetPort: 8080
+ nodePort: 30333
+ selector:
+ live-role: op-v2
+ type: NodePort
+ sessionAffinity: None
+---
+kind: Service
+apiVersion: v1
+metadata:
+ name: helm-live-op-svc
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+spec:
+ ports:
+ - protocol: TCP
+ port: 8080
+ targetPort: 8080
+ selector:
+ live-role: op
+ type: ClusterIP
+ sessionAffinity: None
+---
+kind: ConfigMap
+apiVersion: v1
+metadata:
+ name: helm-live-op-cm-v2
+ namespace: ynydapp
+ labels:
+ octopus.control: wdd
+ app.kubernetes.io/managed-by: octopus
+ cmii.app: live-engine
+ cmii.type: live
+data:
+ live.op.conf: |-
+ server:
+ port: 8080
+ spring:
+ main:
+ allow-bean-definition-overriding: true
+ allow-circular-references: true
+ application:
+ name: cmii-live-operator
+ platform:
+ info:
+ name: cmii-live-operator
+ description: cmii-live-operator
+ version: 6.1.0
+ scanPackage: com.cmii.live.op
+ cloud:
+ nacos:
+ config:
+ username: nacos
+ password: KingKong@95461234
+ server-addr: helm-nacos:8848
+ extension-configs:
+ - data-id: cmii-live-operator.yml
+ group: 6.1.0
+ refresh: true
+ shared-configs:
+ - data-id: cmii-backend-system.yml
+ group: 6.1.0
+ refresh: true
+ discovery:
+ enabled: false
+
+ live:
+ engine:
+ type: srs
+ endpoint: 'http://helm-live-srs-svc:1985'
+ proto:
+ rtmp: 'rtmp://39.129.174.66:31935'
+ rtsp: 'rtsp://39.129.174.66:30554'
+ srt: 'srt://39.129.174.66:30556'
+ flv: 'http://39.129.174.66:30500'
+ hls: 'http://39.129.174.66:30500'
+ rtc: 'webrtc://39.129.174.66:30080'
+ replay: 'https://39.129.174.66:30333'
+ minio:
+ endpoint: http://192.168.186.15:9000
+ access-key: cmii
+ secret-key: B#923fC7mk
+ bucket: live-cluster-hls
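The SRS manifest above wires the on_publish hook to helm-live-op-svc-v2 and exposes the SRS HTTP API on port 1985 through helm-live-srs-svc. As a hedged sketch (the /api/v1/streams/ path is SRS's upstream API, and the hard-coded in-cluster URL is an assumption), a small Go client could list the streams currently being published:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"time"
)

// streamsResponse decodes only the fields printed below; SRS returns more.
type streamsResponse struct {
	Code    int `json:"code"`
	Streams []struct {
		Name    string `json:"name"`
		App     string `json:"app"`
		Clients int    `json:"clients"`
	} `json:"streams"`
}

func main() {
	client := &http.Client{Timeout: 5 * time.Second}

	// ClusterIP service from the manifest above, API port 1985.
	resp, err := client.Get("http://helm-live-srs-svc:1985/api/v1/streams/")
	if err != nil {
		log.Fatalf("SRS API unreachable: %v", err)
	}
	defer resp.Body.Close()

	var body streamsResponse
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		log.Fatalf("decode failed: %v", err)
	}
	for _, s := range body.Streams {
		fmt.Printf("%s/%s clients=%d\n", s.App, s.Name, s.Clients)
	}
}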
diff --git a/agent-operator/CmiiDeployOperator_test.go b/agent-operator/CmiiDeployOperator_test.go
index 9e43fa5..e57581b 100755
--- a/agent-operator/CmiiDeployOperator_test.go
+++ b/agent-operator/CmiiDeployOperator_test.go
@@ -237,6 +237,27 @@ func TestCmiiEnvDeploy_YunNanErJiPingTai(t *testing.T) {
}
+func TestCmiiEnvDeploy_LiaoNingYingJiPingTai(t *testing.T) {
+
+	// Liaoning emergency platform
+ commonEnv := &z_dep.CommonEnvironmentConfig{
+ WebIP: "192.168.40.42",
+ WebPort: "8088",
+ HarborIPOrCustomImagePrefix: "192.168.40.42",
+ HarborPort: "8033",
+ Namespace: "lnydyj",
+ TagVersion: "6.2.0",
+ TenantEnv: "",
+ MinioPublicIP: "",
+ MinioInnerIP: "192.168.40.193",
+ NFSServerIP: "192.168.40.193",
+ ApplyFilePrefix: "",
+ }
+
+ CmiiEnvDeployOffline(commonEnv, true, real_project.Cmii620ImageList)
+
+}
+
func TestCmiiEnvDeploy_ZhuHaiHengQing(t *testing.T) {
	// Zhuhai Hengqin
@@ -321,6 +342,27 @@ func TestCmiiEnvDeploy_GanSuErJiPingTai(t *testing.T) {
}
+func TestCmiiEnvDeploy_YunNanErJiXin(t *testing.T) {
+
+	// Yunnan tier-2 platform (new)
+ commonEnv := &z_dep.CommonEnvironmentConfig{
+ WebIP: "39.129.174.66",
+ WebPort: "8088",
+ HarborIPOrCustomImagePrefix: "192.168.186.11",
+ HarborPort: "8033",
+ Namespace: "ynydapp",
+ TagVersion: "6.1.0",
+ TenantEnv: "",
+ MinioPublicIP: "",
+ MinioInnerIP: "192.168.186.15",
+ NFSServerIP: "192.168.186.15",
+ ApplyFilePrefix: "",
+ }
+
+ CmiiEnvDeployOffline(commonEnv, true, real_project.Cmii611ImageList)
+
+}
+
func TestCmiiNewAppDeploy(t *testing.T) {
deployNamespace := config.DevOperation
diff --git a/agent-operator/CmiiImageSyncOperator_test.go b/agent-operator/CmiiImageSyncOperator_test.go
index 4e89c10..8a480b4 100644
--- a/agent-operator/CmiiImageSyncOperator_test.go
+++ b/agent-operator/CmiiImageSyncOperator_test.go
@@ -51,8 +51,8 @@ func TestPullFromEntityAndSyncConditionally(t *testing.T) {
sync := ImageSyncEntity{
DownloadCondition: &DownloadEntity{
ShouldDownloadImage: true,
- ProjectName: "cmii_6.1.1",
- ProjectVersion: "6.1.1",
+ ProjectName: "cmii_6.2.0",
+ ProjectVersion: "6.2.0-demo",
CmiiNameTagList: []string{
//"cmii-uav-mqtthandler:5.4.0-bjdyt-052102",
},
diff --git a/agent-operator/CmiiK8sHigherOperator.go b/agent-operator/CmiiK8sHigherOperator.go
index 45a37f6..2a0a063 100644
--- a/agent-operator/CmiiK8sHigherOperator.go
+++ b/agent-operator/CmiiK8sHigherOperator.go
@@ -301,6 +301,9 @@ func RestartCmiiFrontendDeployment(cmiiEnv string) {
// UpdateCmiiDeploymentImageTag updates the image tag of a CMII Deployment
func UpdateCmiiDeploymentImageTag(cmiiEnv, appName, newTag string) (updateOK bool, oldImageTag, newImageTag string) {
+	// trim surrounding whitespace so a pasted tag still yields a valid image reference
+ newTag = strings.TrimSpace(newTag)
+
cmiiDeploymentInterface := DefaultCmiiOperator.DeploymentOneInterface(cmiiEnv, appName)
if cmiiDeploymentInterface == nil {
return updateOK, oldImageTag, newImageTag
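The strings.TrimSpace guard added above matters because new tags are typically pasted from chat or CI output and can carry stray whitespace; a value like "6.2.0-demo\n" would otherwise end up verbatim in the image reference. A trivial, self-contained illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// A tag pasted with surrounding whitespace, e.g. copied from a chat message.
	raw := " 6.2.0-demo\n"
	// Same normalization UpdateCmiiDeploymentImageTag now applies up front.
	fmt.Printf("%q -> %q\n", raw, strings.TrimSpace(raw))
}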
diff --git a/agent-operator/CmiiK8sHigherOperator_test.go b/agent-operator/CmiiK8sHigherOperator_test.go
index 9afb79f..cbf16b1 100644
--- a/agent-operator/CmiiK8sHigherOperator_test.go
+++ b/agent-operator/CmiiK8sHigherOperator_test.go
@@ -235,7 +235,7 @@ func TestBackupAllDeploymentFromEnv(t *testing.T) {
}
-func TestBackUpAllCmiiAppImageNameFroEnv(t *testing.T) {
+func TestBackUpAllCmiiAppImageNameFromEnv(t *testing.T) {
BackUpAllCmiiAppImageNameFromEnv(config.Demo)
@@ -243,18 +243,13 @@ func TestBackUpAllCmiiAppImageNameFroEnv(t *testing.T) {
func TestRestartCmiiDeployment(t *testing.T) {
cmiiEnv := config.Demo
- appNameList := []string{
- "cmii-uav-industrial-portfolio",
- }
+ appName := "cmii-uav-notice"
- for _, appName := range appNameList {
- kill := DefaultCmiiOperator.DeploymentRestartByKill(cmiiEnv, appName)
- assert.Equal(t, kill, true, "have unhealthy pod !")
-
- check := DefaultCmiiOperator.DeploymentStatusCheck(cmiiEnv, appName, 180)
- assert.Equal(t, check, true, "DeploymentStatusCheck failed !")
- }
+ kill := DefaultCmiiOperator.DeploymentRestartByKill(cmiiEnv, appName)
+	assert.Equal(t, kill, true, "DeploymentRestartByKill reported an unhealthy pod!")
+ check := DefaultCmiiOperator.DeploymentStatusCheck(cmiiEnv, appName, 180)
+	assert.Equal(t, check, true, "DeploymentStatusCheck failed!")
}
func TestUpdateCmiiDeploymentImageTag(t *testing.T) {
@@ -263,7 +258,7 @@ func TestUpdateCmiiDeploymentImageTag(t *testing.T) {
now := time.Now()
- targetTime := time.Date(now.Year(), now.Month(), now.Day(), 17, 46, 00, 0, now.Location())
+ targetTime := time.Date(now.Year(), now.Month(), now.Day(), 13, 45, 00, 0, now.Location())
duration := time.Duration(0)
@@ -283,10 +278,10 @@ func TestUpdateCmiiDeploymentImageTag(t *testing.T) {
cmiiEnv := config.Demo
appNameTagMap := map[string]string{
- //"cmii-uas-lifecycle": "6.1.0-20241125-121201",
- //"cmii-uav-ruoyi": "2024121102",
- //"cmii-uav-platform-dispatchh5": "6.1.0",
- "cmii-uav-industrial-portfolio": "6.1.0-20241125-121301",
+ //"cmii-app-release": "6.1.0-122001",
+ "cmii-uav-device": "6.1.0-1125-123001",
+ //"cmii-uav-platform-pilot2-to-cloud": "6.1.0-32030-123101",
+ //"cmii-uav-industrial-portfolio": "6.1.0-20241125-121702",
}
for appName, newTag := range appNameTagMap {
diff --git a/agent-operator/CmiiK8sOperator_test.go b/agent-operator/CmiiK8sOperator_test.go
index b446ba9..bb6c92c 100644
--- a/agent-operator/CmiiK8sOperator_test.go
+++ b/agent-operator/CmiiK8sOperator_test.go
@@ -117,15 +117,6 @@ func TestCmiiK8sOperator_DeploymentRestartByKill(t *testing.T) {
}
-func TestCmiiK8sOperator_DeploymentOneInterface(t *testing.T) {
- start := time.Now()
- deploy := DefaultCmiiOperator.DeploymentOneInterface("devflight", "cmii-uav-depotautoreturn")
- elapsed := time.Since(start).Milliseconds()
- fmt.Printf("执行耗时: %d ms\n", elapsed)
-
- utils.BeautifulPrint(*deploy)
-}
-
func TestCmiiK8sOperator_ReplicaSetExists(t *testing.T) {
cmiiEnv := "uavcloud-devflight"
@@ -181,10 +172,10 @@ func TestCmiiK8sOperator_PodFizz(t *testing.T) {
}
func TestCmiiK8sOperator_PodByAppName(t *testing.T) {
- cmiiEnv := "Uat"
+ cmiiEnv := config.Demo
appName := "cmii-admin-data"
- exists := DefaultCmiiOperator.PodByNodeName(cmiiEnv, appName)
+ exists := DefaultCmiiOperator.PodByAppName(cmiiEnv, appName)
for _, podInterface := range exists {
utils.BeautifulPrint(podInterface)
@@ -319,3 +310,13 @@ func TestCmiiK8sOperator_NodeAllInterface(t *testing.T) {
println()
}
}
+
+func TestCmiiK8sOperator_DeploymentOneInterface(t *testing.T) {
+
+ namespace := config.Demo
+ appName := "cmii-uav-mission"
+
+ deploy := DefaultCmiiOperator.DeploymentOneInterface(namespace, appName)
+
+ utils.BeautifulPrint(*deploy)
+}
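For context, the kind of lookup a helper such as DeploymentOneInterface presumably wraps can be sketched directly with client-go; this is a hypothetical standalone version (kubeconfig path, namespace string, and container index are illustrative), not the implementation in agent-operator:

package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Load kubeconfig the same way kubectl does (out-of-cluster access).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatalf("load kubeconfig: %v", err)
	}
	clientset, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatalf("build clientset: %v", err)
	}

	// Fetch a single Deployment by namespace and name, as the test above
	// does for cmii-uav-mission in the demo environment.
	deploy, err := clientset.AppsV1().Deployments("demo").Get(
		context.Background(), "cmii-uav-mission", metav1.GetOptions{})
	if err != nil {
		log.Fatalf("get deployment: %v", err)
	}
	fmt.Printf("%s image: %s\n", deploy.Name, deploy.Spec.Template.Spec.Containers[0].Image)
}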