-
Bug
-
Resolution: Not a Bug
-
Medium
-
Beijing Release
-
None
-
None
-
POD-25
JIRA to track the issue discussed in email.
Mike – Yes, both ports are exposed. I also see the note below →
You can override this default name with label or environment variable SERVICE_NAME or SERVICE_x_NAME, where x is the internal exposed port. Note that if a container has multiple exposed ports then setting SERVICE_NAME will still result in multiple services named SERVICE_NAME-<exposed port>.
This sounds like the plugin should be using similar logic to determine service name registered?
Below is inspect output.
ubuntu@onap-dcae-bootstrap:~$ sudo docker inspect 534d02bb153b
[
{
"Id": "534d02bb153b762191e8ceb5ef59534982a8c069e77c83494e1679e3b1dd4d32",
"Created": "2018-04-30T03:28:09.06620985Z",
"Path": "/bin/sh",
"Args": [
"-c",
"/opt/tca/restart.sh"
],
"State": {
"Status": "running",
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 10877,
"ExitCode": 0,
"Error": "",
"StartedAt": "2018-04-30T03:28:09.390654754Z",
"FinishedAt": "0001-01-01T00:00:00Z"
},
"Image": "sha256:0b6330510d9355899bb356f32fa1a90836715b1b68ba16f75ff7d74348b210d4",
"ResolvConfPath": "/var/lib/docker/containers/534d02bb153b762191e8ceb5ef59534982a8c069e77c83494e1679e3b1dd4d32/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/534d02bb153b762191e8ceb5ef59534982a8c069e77c83494e1679e3b1dd4d32/hostname",
"HostsPath": "/var/lib/docker/containers/534d02bb153b762191e8ceb5ef59534982a8c069e77c83494e1679e3b1dd4d32/hosts",
"LogPath": "/var/lib/docker/containers/534d02bb153b762191e8ceb5ef59534982a8c069e77c83494e1679e3b1dd4d32/534d02bb153b762191e8ceb5ef59534982a8c069e77c83494e1679e3b1dd4d32-json.log",
"Name": "/37484b23d6ce48c6a721cb53f735d7ff_dcaegen2-analytics_tca",
"RestartCount": 0,
"Driver": "aufs",
"MountLabel": "",
"ProcessLabel": "",
"AppArmorProfile": "docker-default",
"ExecIDs": null,
"HostConfig": {
"Binds": null,
"ContainerIDFile": "",
"LogConfig": {
"Type": "json-file",
"Config": {}
},
"NetworkMode": "default",
"PortBindings": {
"11011/tcp": [
{
"HostIp": "",
"HostPort": "32016"
}
]
},
"RestartPolicy": {
"Name": "",
"MaximumRetryCount": 0
},
"AutoRemove": false,
"VolumeDriver": "",
"VolumesFrom": null,
"CapAdd": null,
"CapDrop": null,
"Dns": [
"10.0.4.1"
],
"DnsOptions": null,
"DnsSearch": [
"service.consul"
],
"ExtraHosts": [
"consul:10.0.4.1"
],
"GroupAdd": null,
"IpcMode": "",
"Cgroup": "",
"Links": null,
"OomScoreAdj": 0,
"PidMode": "",
"Privileged": false,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"SecurityOpt": null,
"UTSMode": "",
"UsernsMode": "",
"ShmSize": 67108864,
"Runtime": "runc",
"ConsoleSize": [
0,
0
],
"Isolation": "",
"CpuShares": 0,
"Memory": 0,
"NanoCpus": 0,
"CgroupParent": "",
"BlkioWeight": 0,
"BlkioWeightDevice": null,
"BlkioDeviceReadBps": null,
"BlkioDeviceWriteBps": null,
"BlkioDeviceReadIOps": null,
"BlkioDeviceWriteIOps": null,
"CpuPeriod": 0,
"CpuQuota": 0,
"CpuRealtimePeriod": 0,
"CpuRealtimeRuntime": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": null,
"DeviceCgroupRules": null,
"DiskQuota": 0,
"KernelMemory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"MemorySwappiness": -1,
"OomKillDisable": false,
"PidsLimit": 0,
"Ulimits": null,
"CpuCount": 0,
"CpuPercent": 0,
"IOMaximumIOps": 0,
"IOMaximumBandwidth": 0
},
"GraphDriver": {
"Data": null,
"Name": "aufs"
},
"Mounts": [],
"Config": {
"Hostname": "534d02bb153b",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"ExposedPorts": {
"11011/tcp": {},
"11015/tcp": {}
},
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"AAIHOST=aai",
"CBS_HOST=config-binding-service",
"SERVICE_TAGS=tcadocker-service-7",
"CBS_PORT=10000",
"SERVICE_CHECK_INTERVAL=15s",
"CONSUL_PORT=8500",
"DMAAPPUBTOPIC=unauthenticated.DCAE_CL_OUTPUT",
"SERVICE_CHECK_HTTP=/",
"CONSUL_HOST=10.12.5.130",
"SERVICE_11015_IGNORE=true",
"DMAAPSUBTOPIC=unauthenticated.SEC_MEASUREMENT_OUTPUT",
"SERVICE_NAME=37484b23d6ce48c6a721cb53f735d7ff_dcaegen2-analytics_tca",
"HOSTNAME=37484b23d6ce48c6a721cb53f735d7ff_dcaegen2-analytics_tca",
"CONFIG_BINDING_SERVICE=config_binding_service",
"AAIPORT=30233",
"DMAAPPORT=30227",
"SERVICE_CHECK_TIMEOUT=1s",
"DMAAPHOST=10.12.5.32",
"PATH=/opt/cdap/sdk/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"GOSU_VERSION=1.7"
],
"Cmd": null,
"ArgsEscaped": true,
"Image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.0.0",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": [
"/bin/sh",
"-c",
"/opt/tca/restart.sh"
],
"OnBuild": null,
"Labels": {}
},
"NetworkSettings": {
"Bridge": "",
"SandboxID": "8ad5c4c26b96a54a1714463c8d4c5ea9f7d200bb335956869c057dce0a215473",
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"Ports": {
"11011/tcp": [
{
"HostIp": "0.0.0.0",
"HostPort": "32016"
}
],
"11015/tcp": null
},
"SandboxKey": "/var/run/docker/netns/8ad5c4c26b96",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null,
"EndpointID": "d09f9e533891c7c6d4e5edc037ac56da4650fa4527dceffdadc08343ea0cf903",
"Gateway": "172.17.0.1",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "172.17.0.3",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"MacAddress": "02:42:ac:11:00:03",
"Networks": {
"bridge": {
"IPAMConfig": null,
"Links": null,
"Aliases": null,
"NetworkID": "94a66e6a73cf44a731e726bd5151cbffce34c29c26c99803addf7662f0ef1a55",
"EndpointID": "d09f9e533891c7c6d4e5edc037ac56da4650fa4527dceffdadc08343ea0cf903",
"Gateway": "172.17.0.1",
"IPAddress": "172.17.0.3",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": "02:42:ac:11:00:03"
}
}
}
}
]
From: HWANG, MICHAEL Sent: Monday, April 30, 2018 11:10 AM To: VENKATESH KUMAR, VIJAY <vv770d@att.com>; JI, LUSHENG <lji@research.att.com>; LUCAS, JACK <jflucas@research.att.com>; SHATOV, ALEXANDER V <alexs@research.att.com> Subject: Re: location_id for heat deploy
My quick answer/early suspicion is that this is a registrator bug. If you see here: https://gliderlabs.com/registrator/latest/user/services/ under "service name". The logic is: <base(container-image)>[-<exposed-port> if >1 ports] That list of ports is not filtering the "ignored" ports. I've seen this in the code too way back when. and/or a docker issue where 11015 is not being mapped. Can I see the "docker inspect" on the container?
On 04/30/2018 10:51 AM, VENKATESH KUMAR, VIJAY wrote:
Mike – For #1 – no services were registered for tca
For #2 – the consul had 11011 tagged as part of the service name (which doesn’t match what the docker plugin is looking for).
Below is from o/p of docker ps and corresponding consul entry for new deploy.
ubuntu@onap-dcae-bootstrap:~$ sudo docker ps | grep tca
534d02bb153b nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.0.0 "/bin/sh -c /opt/t..." 11 hours ago Up 11 hours 11015/tcp, 0.0.0.0:32016->11011/tcp 37484b23d6ce48c6a721cb53f735d7ff_dcaegen2-analytics_tca
Consul registered service name →
37484b23d6ce48c6a721cb53f735d7ff_dcaegen2-analytics_tca-11011
From: HWANG, MICHAEL Sent: Monday, April 30, 2018 10:44 AM To: VENKATESH KUMAR, VIJAY <vv770d@att.com>; JI, LUSHENG <lji@research.att.com>; LUCAS, JACK <jflucas@research.att.com>; SHATOV, ALEXANDER V <alexs@research.att.com> Subject: Re: location_id for heat deploy
Docker plugin looks for "4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca" Are there any other entries in Consul for this service? Maybe there's an additional port that needs to be ignored? Can you do a "docker ps" on that container and see port mappings?
On 04/26/2018 03:13 PM, VENKATESH KUMAR, VIJAY wrote:
Mike – Sorry, I missed responding earlier. This could be related to the ports (attached the blueprint).
Tried two different deployment – both reported with same error from plugin
- Ignore both ports to registrator; this cause no service to be added into consul.
SERVICE_11011_IGNORE: "true"
SERVICE_11015_IGNORE: "true"
- Ignoring 11015 from registrator caused the service to be added into consul, however it had port 11011 tagged at the end. (E.g. - 4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca-11011)
What does the docker plugin lookup based on?
Below are logs.
$ cfy install tcadocker-service.yaml -b tcadocker-service-5 -d tcadocker-service-5 -i tcadocker-service-input.yaml
Uploading blueprint tcadocker-service.yaml...
tcadocker-service... |################################################| 100.0%
Blueprint uploaded. The blueprint's id is tcadocker-service-5
Creating new deployment from blueprint tcadocker-service-5...
Deployment created. The deployment's id is tcadocker-service-5
Executing workflow install on deployment tcadocker-service-5 [timeout=900 seconds]
Deployment environment creation is pending...
2018-04-26 16:39:36.822 CFY <tcadocker-service-5> Starting 'create_deployment_environment' workflow execution
2018-04-26 16:39:37.210 CFY <tcadocker-service-5> Installing deployment plugins
2018-04-26 16:39:37.210 CFY <tcadocker-service-5> [,] Sending task 'cloudify_agent.operations.install_plugins'
2018-04-26 16:39:37.210 CFY <tcadocker-service-5> [,] Task started 'cloudify_agent.operations.install_plugins'
2018-04-26 16:39:37.119 LOG <tcadocker-service-5> [,] INFO: Installing plugin: relationships
2018-04-26 16:39:38.273 LOG <tcadocker-service-5> [,] INFO: Using existing installation of managed plugin: bf812bca-97fb-44fb-a6a1-3ee1c8de167d [package_name: relationshipplugin, package_version: 1.0.0, supported_platform: any]
2018-04-26 16:39:38.273 LOG <tcadocker-service-5> [,] INFO: Installing plugin: docker
2018-04-26 16:39:39.279 LOG <tcadocker-service-5> [,] INFO: Using existing installation of managed plugin: 230cf891-80d8-4625-bce8-c83cf6151bf3 [package_name: dockerplugin, package_version: 3.2.0, supported_platform: any]
2018-04-26 16:39:39.279 LOG <tcadocker-service-5> [,] INFO: Installing plugin: dcaepolicy
2018-04-26 16:39:39.279 LOG <tcadocker-service-5> [,] INFO: Using existing installation of managed plugin: f47778f1-a4a4-4de8-a3c9-3f9bc42702a3 [package_name: dcaepolicyplugin, package_version: 2.3.0, supported_platform: any]
2018-04-26 16:39:39.327 CFY <tcadocker-service-5> [,] Task succeeded 'cloudify_agent.operations.install_plugins'
2018-04-26 16:39:40.214 CFY <tcadocker-service-5> Skipping starting deployment policy engine core - no policies defined
2018-04-26 16:39:40.214 CFY <tcadocker-service-5> Creating deployment work directory
2018-04-26 16:39:40.214 CFY <tcadocker-service-5> 'create_deployment_environment' workflow execution succeeded
2018-04-26 16:39:43.278 CFY <tcadocker-service-5> Starting 'install' workflow execution
2018-04-26 16:39:44.218 CFY <tcadocker-service-5> [docker_service_host_n1cstb] Creating node
2018-04-26 16:39:44.218 CFY <tcadocker-service-5> [tca_policy_ov7vcv] Creating node
2018-04-26 16:39:44.218 CFY <tcadocker-service-5> [docker_service_host_n1cstb.create] Sending task 'dockerplugin.select_docker_host'
2018-04-26 16:39:44.218 CFY <tcadocker-service-5> [docker_service_host_n1cstb.create] Task started 'dockerplugin.select_docker_host'
2018-04-26 16:39:44.218 CFY <tcadocker-service-5> [tca_policy_ov7vcv.create] Sending task 'dcaepolicyplugin.policy_get'
2018-04-26 16:39:44.218 CFY <tcadocker-service-5> [tca_policy_ov7vcv.create] Task started 'dcaepolicyplugin.policy_get'
2018-04-26 16:39:44.644 LOG <tcadocker-service-5> [docker_service_host_n1cstb.create] INFO: Selected Docker host: dockerhost
2018-04-26 16:39:45.308 LOG <tcadocker-service-5> [tca_policy_ov7vcv.create] INFO: getting service_url at http://consul:8500/v1/catalog/service/policy_handler
2018-04-26 16:39:45.308 LOG <tcadocker-service-5> [tca_policy_ov7vcv.create] INFO: got 200 for service_url at http://consul:8500/v1/catalog/service/policy_handler response: [{"ID":"576e6572-e64e-fc34-ffe0-6f70020d17ce","Node":"consul","Address":"172.18.0.4","Datacenter":"dc1","TaggedAddresses":{"lan":"172.18.0.4","wan":"172.18.0.4"},"NodeMeta":{},"ServiceID":"1984e2eb1fe8:policy-handler:25577","ServiceName":"policy_handler","ServiceTags":[],"ServiceAddress":"10.0.4.1","ServicePort":25577,"ServiceEnableTagOverride":false,"CreateIndex":574,"ModifyIndex":584}]
2018-04-26 16:39:45.308 LOG <tcadocker-service-5> [tca_policy_ov7vcv.create] INFO: getting latest policy from http://10.0.4.1:25577/policy_latest/CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw headers={"X-ECOMP-RequestID": "11517562-ee07-4e6d-86bf-1b848b50353b"}
2018-04-26 16:39:45.308 LOG <tcadocker-service-5> [tca_policy_ov7vcv.create] INFO: found policy CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw: {"policy_body": {"policyName": "CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw.2.xml", "policyConfigMessage": "Config Retrieved! ", "responseAttributes": {}, "policyConfigStatus": "CONFIG_RETRIEVED", "matchingConditions": {"ONAPName": "DCAE", "uuid": "test", "Location": "SampleServiceLocation", "service": "tca_policy", "ConfigName": "SampleConfigName"}, "policyType": "MicroService", "config": {"policyScope": "CLAMP", "description": "MicroService Policy", "service": "tca_policy", "policyName": "CLAMPClampTest_v1_0.ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw", "riskLevel": "1", "priority": "1", "riskType": "SampleRiskType", "guard": "False", "version": "1.1.0", "location": "SampleServiceLocation", "configName": "SampleConfigName", "content": {"tca_policy": {"domain": "measurementsForVfScaling", "metricsPerEventName": [{"policyName": "CLAMPClampTest_v1_0.ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw", "policyScope": "DCAE", "thresholds": [{"direction": "GREATER", "severity": "MAJOR", "closedLoopControlName": "ClosedLoop-129c1289-44a4-11e8-a3fe-0242ac120003_null", "fieldPath": "$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedBroadcastPacketsAccumulated", "version": "1.0.2", "closedLoopEventStatus": "ONSET", "thresholdValue": 10}], "eventName": "vCPEvGMUXPacketLoss", "controlLoopSchemaType": "VM", "policyVersion": "v0.0.1"}]}}, "templateVersion": "OpenSource.version.1", "uuid": "test"}, "property": null, "type": "JSON", "policyVersion": "2"}, "policy_id": "CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw"}
2018-04-26 16:39:45.308 LOG <tcadocker-service-5> [tca_policy_ov7vcv.create] INFO: latest policy for policy_id(CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw) status(200) response: {"policy_body": {"policyName": "CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw.2.xml", "policyConfigMessage": "Config Retrieved! ", "responseAttributes": {}, "policyConfigStatus": "CONFIG_RETRIEVED", "type": "JSON", "policyType": "MicroService", "matchingConditions": {"Location": "SampleServiceLocation", "ONAPName": "DCAE", "ConfigName": "SampleConfigName", "service": "tca_policy", "uuid": "test"}, "property": null, "config": {"policyScope": "CLAMP", "uuid": "test", "service": "tca_policy", "policyName": "CLAMPClampTest_v1_0.ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw", "riskLevel": "1", "description": "MicroService Policy", "priority": "1", "riskType": "SampleRiskType", "guard": "False", "version": "1.1.0", "location": "SampleServiceLocation", "content": {"tca_policy": {"domain": "measurementsForVfScaling", "metricsPerEventName": [{"policyName": "CLAMPClampTest_v1_0.ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw", "policyScope": "DCAE", "thresholds": [{"direction": "GREATER", "severity": "MAJOR", "closedLoopControlName": "ClosedLoop-129c1289-44a4-11e8-a3fe-0242ac120003_null", "fieldPath": "$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedBroadcastPacketsAccumulated", "version": "1.0.2", "closedLoopEventStatus": "ONSET", "thresholdValue": 10}], "eventName": "vCPEvGMUXPacketLoss", "controlLoopSchemaType": "VM", "policyVersion": "v0.0.1"}]}}, "templateVersion": "OpenSource.version.1", "configName": "SampleConfigName"}, "policyVersion": "2"}, "policy_id": "CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw"}
2018-04-26 16:39:45.308 LOG <tcadocker-service-5> [tca_policy_ov7vcv.create] INFO: exit policy_get
2018-04-26 16:39:45.223 CFY <tcadocker-service-5> [docker_service_host_n1cstb.create] Task succeeded 'dockerplugin.select_docker_host'
2018-04-26 16:39:45.223 CFY <tcadocker-service-5> [tca_policy_ov7vcv.create] Task succeeded 'dcaepolicyplugin.policy_get'
2018-04-26 16:39:46.226 CFY <tcadocker-service-5> [tca_policy_ov7vcv] Configuring node
2018-04-26 16:39:46.226 CFY <tcadocker-service-5> [docker_service_host_n1cstb] Configuring node
2018-04-26 16:39:46.226 CFY <tcadocker-service-5> [tca_policy_ov7vcv] Starting node
2018-04-26 16:39:46.226 CFY <tcadocker-service-5> [docker_service_host_n1cstb] Starting node
2018-04-26 16:39:47.229 CFY <tcadocker-service-5> [tca_docker_me9xei] Creating node
2018-04-26 16:39:47.229 CFY <tcadocker-service-5> [tca_docker_me9xei.create] Sending task 'dockerplugin.create_for_components_with_streams'
2018-04-26 16:39:47.229 CFY <tcadocker-service-5> [tca_docker_me9xei.create] Task started 'dockerplugin.create_for_components_with_streams'
2018-04-26 16:39:47.591 LOG <tcadocker-service-5> [tca_docker_me9xei.create] WARNING: failed to find service_component_name to store_policies in consul-kv
2018-04-26 16:39:48.314 LOG <tcadocker-service-5> [tca_docker_me9xei.create] INFO: Added config for 4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca
2018-04-26 16:39:48.314 LOG <tcadocker-service-5> [tca_docker_me9xei.create] INFO: Done setting up: 4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca
2018-04-26 16:39:48.314 LOG <tcadocker-service-5> [tca_docker_me9xei.create] INFO: response 200 for store_policies http://consul:8500/v1/txn: text={"Results":[\{"KV":\{"LockIndex":0,"Key":"4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca:policies/event","Flags":0,"Value":null,"CreateIndex":40408,"ModifyIndex":40408}},\{"KV":\{"LockIndex":0,"Key":"4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca:policies/items/CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw","Flags":0,"Value":null,"CreateIndex":40408,"ModifyIndex":40408}}],"Errors":null} txn=[\{"KV": \{"Verb": "delete-tree", "Key": "4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca:policies/"}}, \{"KV": \{"Verb": "set", "Value": "eyJhY3Rpb24iOiAiZ2F0aGVyZWQiLCAidGltZXN0YW1wIjogIjIwMTgtMDQtMjZUMTY6Mzk6NDcuNTkyWiIsICJ1cGRhdGVfaWQiOiAiMzY2NDMwNDMtMGJmNi00MjcwLWJlYzctY2JmOGEyOWU1NmIzIiwgInBvbGljaWVzX2NvdW50IjogMX0=", "Key": "4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca:policies/event"}}, \{"KV": \{"Verb": "set", "Value": 
"eyJwb2xpY3lOYW1lIjogIkNMQU1QQ2xhbXBUZXN0X3YxXzAuQ29uZmlnX01TX0Nsb3NlZExvb3BfMTI5YzEyODlfNDRhNF8xMWU4X2EzZmVfMDI0MmFjMTIwMDAzX1RDQV8xZDEzdW53LjIueG1sIiwgInBvbGljeUNvbmZpZ01lc3NhZ2UiOiAiQ29uZmlnIFJldHJpZXZlZCEgIiwgInJlc3BvbnNlQXR0cmlidXRlcyI6IHt9LCAicG9saWN5Q29uZmlnU3RhdHVzIjogIkNPTkZJR19SRVRSSUVWRUQiLCAibWF0Y2hpbmdDb25kaXRpb25zIjogeyJPTkFQTmFtZSI6ICJEQ0FFIiwgIkNvbmZpZ05hbWUiOiAiU2FtcGxlQ29uZmlnTmFtZSIsICJ1dWlkIjogInRlc3QiLCAic2VydmljZSI6ICJ0Y2FfcG9saWN5IiwgIkxvY2F0aW9uIjogIlNhbXBsZVNlcnZpY2VMb2NhdGlvbiJ9LCAicG9saWN5VHlwZSI6ICJNaWNyb1NlcnZpY2UiLCAiY29uZmlnIjogeyJwb2xpY3lTY29wZSI6ICJDTEFNUCIsICJkZXNjcmlwdGlvbiI6ICJNaWNyb1NlcnZpY2UgUG9saWN5IiwgInNlcnZpY2UiOiAidGNhX3BvbGljeSIsICJwb2xpY3lOYW1lIjogIkNMQU1QQ2xhbXBUZXN0X3YxXzAuQ2xvc2VkTG9vcF8xMjljMTI4OV80NGE0XzExZThfYTNmZV8wMjQyYWMxMjAwMDNfVENBXzFkMTN1bnciLCAicmlza0xldmVsIjogIjEiLCAicHJpb3JpdHkiOiAiMSIsICJyaXNrVHlwZSI6ICJTYW1wbGVSaXNrVHlwZSIsICJndWFyZCI6ICJGYWxzZSIsICJ2ZXJzaW9uIjogIjEuMS4wIiwgImxvY2F0aW9uIjogIlNhbXBsZVNlcnZpY2VMb2NhdGlvbiIsICJjb25maWdOYW1lIjogIlNhbXBsZUNvbmZpZ05hbWUiLCAiY29udGVudCI6IHsidGNhX3BvbGljeSI6IHsiZG9tYWluIjogIm1lYXN1cmVtZW50c0ZvclZmU2NhbGluZyIsICJtZXRyaWNzUGVyRXZlbnROYW1lIjogW3sicG9saWN5TmFtZSI6ICJDTEFNUENsYW1wVGVzdF92MV8wLkNsb3NlZExvb3BfMTI5YzEyODlfNDRhNF8xMWU4X2EzZmVfMDI0MmFjMTIwMDAzX1RDQV8xZDEzdW53IiwgInBvbGljeVNjb3BlIjogIkRDQUUiLCAidGhyZXNob2xkcyI6IFt7ImRpcmVjdGlvbiI6ICJHUkVBVEVSIiwgInNldmVyaXR5IjogIk1BSk9SIiwgImNsb3NlZExvb3BDb250cm9sTmFtZSI6ICJDbG9zZWRMb29wLTEyOWMxMjg5LTQ0YTQtMTFlOC1hM2ZlLTAyNDJhYzEyMDAwM19udWxsIiwgImZpZWxkUGF0aCI6ICIkLmV2ZW50Lm1lYXN1cmVtZW50c0ZvclZmU2NhbGluZ0ZpZWxkcy52TmljUGVyZm9ybWFuY2VBcnJheVsqXS5yZWNlaXZlZEJyb2FkY2FzdFBhY2tldHNBY2N1bXVsYXRlZCIsICJ2ZXJzaW9uIjogIjEuMC4yIiwgImNsb3NlZExvb3BFdmVudFN0YXR1cyI6ICJPTlNFVCIsICJ0aHJlc2hvbGRWYWx1ZSI6IDEwfV0sICJldmVudE5hbWUiOiAidkNQRXZHTVVYUGFja2V0TG9zcyIsICJjb250cm9sTG9vcFNjaGVtYVR5cGUiOiAiVk0iLCAicG9saWN5VmVyc2lvbiI6ICJ2MC4wLjEifV19fSwgInRlbXBsYXRlVmVyc2lvbiI6ICJPcGVuU291cmNlLnZlcnNpb24uMSIsICJ1dWlkIjogInRlc3QifSwgInByb3BlcnR5IjogbnV
sbCwgInR5cGUiOiAiSlNPTiIsICJwb2xpY3lWZXJzaW9uIjogIjIifQ==", "Key": "4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca:policies/items/CLAMPClampTest_v1_0.Config_MS_ClosedLoop_129c1289_44a4_11e8_a3fe_0242ac120003_TCA_1d13unw"}}]
2018-04-26 16:39:48.236 CFY <tcadocker-service-5> [tca_docker_me9xei.create] Task succeeded 'dockerplugin.create_for_components_with_streams'
2018-04-26 16:39:48.236 CFY <tcadocker-service-5> [preconfigure] Sending task 'relationshipplugin.forward_destination_info'
2018-04-26 16:39:48.236 CFY <tcadocker-service-5> [preconfigure] Task started 'relationshipplugin.forward_destination_info'
2018-04-26 16:39:49.318 LOG <tcadocker-service-5> [preconfigure] INFO: Forwarding selected target: tca_docker_me9xei
2018-04-26 16:39:49.239 CFY <tcadocker-service-5> [preconfigure] Task succeeded 'relationshipplugin.forward_destination_info'
2018-04-26 16:39:49.239 CFY <tcadocker-service-5> [tca_docker_me9xei] Configuring node
2018-04-26 16:39:50.244 CFY <tcadocker-service-5> [tca_docker_me9xei] Starting node
2018-04-26 16:39:50.244 CFY <tcadocker-service-5> [tca_docker_me9xei.start] Sending task 'dockerplugin.create_and_start_container_for_components_with_streams'
2018-04-26 16:39:50.244 CFY <tcadocker-service-5> [tca_docker_me9xei.start] Task started 'dockerplugin.create_and_start_container_for_components_with_streams'
2018-04-26 16:39:51.253 LOG <tcadocker-service-5> [tca_docker_me9xei.start] INFO: Docker container config: {'Env': [u'AAIHOST=aai', u'CBS_HOST=config-binding-service', u'SERVICE_TAGS=tcadocker-service-5', u'CBS_PORT=10000', u'SERVICE_CHECK_INTERVAL=15s', u'CONSUL_PORT=8500', u'DMAAPPUBTOPIC=unauthenticated.DCAE_CL_OUTPUT', u'SERVICE_CHECK_HTTP=/', u'CONSUL_HOST=10.12.5.130', u'SERVICE_11015_IGNORE=true', u'DMAAPSUBTOPIC=unauthenticated.SEC_MEASUREMENT_OUTPUT', u'SERVICE_NAME=4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca', u'HOSTNAME=4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca', u'CONFIG_BINDING_SERVICE=config_binding_service', u'AAIPORT=30233', u'DMAAPPORT=30227', u'SERVICE_CHECK_TIMEOUT=1s', u'DMAAPHOST=10.12.5.32'], 'Hostname': None, 'StopSignal': None, 'Entrypoint': None, 'Dns': None, 'Memory': None, 'OpenStdin': False, 'MacAddress': None, 'CpusetCpus': None, 'Cpuset': None, 'User': None, 'VolumeDriver': None, 'CpuShares': None, 'NetworkingConfig': None, 'AttachStdout': False, 'NetworkDisabled': False, 'WorkingDir': None, 'Cmd': [], 'StdinOnce': False, 'AttachStdin': False, 'Volumes': None, 'MemorySwap': None, 'VolumesFrom': None, 'Tty': False, 'AttachStderr': False, 'Domainname': None, 'Image': u'nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.0.0', 'Labels': None, 'HostConfig': {'NetworkMode': 'default', 'PortBindings': {'11011/tcp': [\{'HostPort': '32014', 'HostIp': ''}]}, 'DnsSearch': ['service.consul'], 'Dns': [u'10.0.4.1'], 'ExtraHosts': ['consul:10.0.4.1']}, 'ExposedPorts': {'11011/tcp': {}}}
2018-04-26 16:39:52.321 LOG <tcadocker-service-5> [tca_docker_me9xei.start] INFO: Container started: 639f79a78841e86a3e22fb3a8a68122cfff0095e6fd0e41bd6581c4b8c0c560d
2018-04-26 16:39:52.321 LOG <tcadocker-service-5> [tca_docker_me9xei.start] INFO: Container started: 639f79a78841e86a3e22fb3a8a68122cfff0095e6fd0e41bd6581c4b8c0c560d, 4c188804928d4e948e392c84d47d8836_dcaegen2-analytics_tca
2018-04-26 16:44:52.078 CFY <tcadocker-service-5> [tca_docker_me9xei.start] Task failed 'dockerplugin.create_and_start_container_for_components_with_streams' -> Container never became healthy
2018-04-26 16:44:52.309 CFY <tcadocker-service-5> 'install' workflow execution failed: RuntimeError: Workflow failed: Task failed 'dockerplugin.create_and_start_container_for_components_with_streams' -> Container never became healthy
Execution of workflow install for deployment tcadocker-service-5 failed. [error=Traceback (most recent call last):
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/dispatch.py", line 519, in _remote_workflow_child_thread
workflow_result = self._execute_workflow_function()
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/dispatch.py", line 550, in _execute_workflow_function
result = self.func(*self.args, **self.kwargs)
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/plugins/workflows.py", line 27, in install
node_instances=set(ctx.node_instances))
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/plugins/lifecycle.py", line 28, in install_node_instances
processor.install()
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/plugins/lifecycle.py", line 93, in install
graph_finisher_func=self._finish_install)
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/plugins/lifecycle.py", line 114, in _process_node_instances
self.graph.execute()
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/workflows/tasks_graph.py", line 133, in execute
self._handle_terminated_task(task)
File "/opt/mgmtworker/env/lib/python2.7/site-packages/cloudify/workflows/tasks_graph.py", line 207, in _handle_terminated_task
raise RuntimeError(message)
RuntimeError: Workflow failed: Task failed 'dockerplugin.create_and_start_container_for_components_with_streams' -> Container never became healthy
]
- Run 'cfy events list -e 65e85d94-bf2c-49d9-8592-71248c99d906' to retrieve the execution's events/logs
- mentioned in
-
Page Loading...