nicholasjackson/fake-service

[ERROR] Error obtaining context

crizstian opened this issue · 3 comments

Hey Nicholas,
thanks for sharing your demo code, I saw your webinar on demo-consul-service-mesh

I am converting your docker-compose files into Nomad .hcl files. I have done that and the jobs are running, but I am getting the following error when I call the upstream:

CURL Command

root@ip-10-11-2-27:/home/ubuntu/connect# curl 10.11.2.27:9090/v1/
Error communicating with upstream service: Get http://localhost:9091/v1/: dial tcp 127.0.0.1:9091: connect: connection refused

Logs

2019-09-06T18:57:01.519Z [INFO]  Handling request: request="GET /v1/ HTTP/1.1
Host: 10.11.2.27:9090
user-agent: curl/7.47.0
accept: */*"
2019-09-06T18:57:01.519Z [ERROR] Error obtaining context, creating new span: error="opentracing: SpanContext not found in Extract carrier"
2019-09-06T18:57:01.519Z [INFO]  Calling upstream HTTP service: uri=http://localhost:9091

Maybe this is because I am not using Envoy but the built-in Consul proxy instead.

    # Front-end "web" task: fake-service listens on 9090 and calls its
    # upstream through the sidecar expected on localhost:9091.
    task "web" {
      driver = "docker"
      config {
        image   = "nicholasjackson/fake-service:v0.3.5"
        force_pull = true
        # Map the "serviceport" label (static host port 9090 below) to
        # container port 9090.
        port_map {
          serviceport = 9090
        }
      }

      env {
        LISTEN_ADDR = "0.0.0.0:9090"
        # Requests go to the local sidecar proxy; nothing listens on
        # 127.0.0.1:9091 unless the sidecar task registers correctly.
        UPSTREAM_URIS = "http://localhost:9091"
        UPSTREAM_CALL = "true"
        MESSAGE = "Hello World"
        HTTP_CLIENT_KEEP_ALIVES = "false"
      }

      resources {
        network {
            mbits = 10
            # Static port so the service is reachable on <host>:9090.
            port "serviceport" {
              static = 9090
            }
            port "aero" {}
        }
      }
      # Plain (non-Connect) Consul registration with an HTTP health check.
      service {
        name = "web"
        tags = [ "v1" ]
        port = "serviceport"
        check {
          type     = "http"
          port     = "serviceport"
          path     = "/health"
          interval = "5s"
          timeout  = "2s"
        }
      }
    }

    # Sidecar proxy for "web", run with the built-in Consul connect proxy
    # ("exec" driver + wrapper script) rather than Envoy.
    task "web-sidecar-proxy" {
      driver = "exec"

      config {
        # run-proxy.sh is expected to register the rendered proxy config
        # below with the local Consul agent and start the proxy.
        command = "/usr/local/bin/run-proxy.sh"
        args    = ["${NOMAD_IP_proxy}", "${NOMAD_TASK_DIR}", "${NOMAD_META_source_target}"]
      }

      meta {
        source_target       = "web"     # service this proxy fronts
        dest_target         = "api-v1"  # upstream exposed on 127.0.0.1:9091
      }

      # Consul connect-proxy registration rendered into the task dir.
      # FIX: Nomad exposes task meta to templates as NOMAD_META_<key>.
      # The original `{{ env "dest_target" }}` resolves to an empty string,
      # producing an upstream with no destination, so nothing ever listens
      # on 127.0.0.1:9091 — hence the "connection refused" when web calls
      # its upstream.
      template {
        data = <<EOH
          {
              "name": "{{ env "NOMAD_META_source_target" }}-sidecar-proxy",
              "port": {{ env "NOMAD_PORT_proxy" }},
              "kind": "connect-proxy",
              "proxy": {
                "destination_service_name": "{{ env "NOMAD_META_source_target" }}",
                "destination_service_id": "{{ env "NOMAD_META_source_target" }}",
                "upstreams": [
                  {
                    "destination_name": "{{ env "NOMAD_META_dest_target" }}",
                    "local_bind_address": "127.0.0.1",
                    "local_bind_port": 9091
                  }
                ]
              }
          }
          EOH

        destination = "local/${NOMAD_META_source_target}-proxy.json"
      }

      resources {
        network {
          port "proxy" {}
        }
      }
    } # - end upstream proxy - #

I am going to switch to envoy proxy to see the results

Same issue when running with Envoy; I don't know what I am missing.

# Demo job: a "web" front end plus two api versions, each paired with a
# Consul Connect (Envoy) sidecar task.
job "traffic-routing" {

  datacenters = ["aero-aws-us-west-1"]
  region      = "aero-aws-us-west-1-region"
  type        = "service"

  constraint {
    attribute = "${node.class}"
    value     = "app"
  }

  # NOTE: pinning every group to a single node means all three Envoy
  # sidecars run on the same host, so any static sidecar ports they
  # request must not collide with each other.
  constraint {
    attribute = "${node.unique.name}"
    value     = "ip-10-11-2-27"
  }
  group "web" {
    count = 1

    # fake-service front end; talks to its upstream through the Envoy
    # sidecar bound on 127.0.0.1:9091.
    task "web" {
      driver = "docker"
      config {
        image   = "nicholasjackson/fake-service:v0.3.5"
        force_pull = true
        port_map {
          serviceport = 9090
        }
      }

      env {
        LISTEN_ADDR = "0.0.0.0:9090"
        UPSTREAM_URIS = "http://localhost:9091"
        UPSTREAM_CALL = "true"
        MESSAGE = "Hello World"
        HTTP_CLIENT_KEEP_ALIVES = "false"
        HTTP_CLIENT_APPEND_REQUEST = "true"
        NAME = "web"
      }

      resources {
        network {
            mbits = 10
            port "serviceport" {}
            port "aero" {}
        }
      }
    }

    # Envoy sidecar for "web": the image registers SERVICE_CONFIG with the
    # Consul agent and starts `consul connect envoy`.
    task "web-sidecar-proxy" {
      driver = "docker"
      config {
        image   = "nicholasjackson/consul-envoy:v1.6.0-v0.10.0"
        force_pull = true
        # NOTE(review): the docker driver normally takes the executable in
        # `command` and its arguments in `args`; this single string only
        # works if the image entrypoint word-splits it — confirm against
        # the image.
        command = "consul connect envoy -sidecar-for web-v1"
        port_map {
          serviceport = 9090
        }
        volumes = [
          "local/:/config"
        ]
      }

      env {
        CONSUL_HTTP_ADDR = "${NOMAD_IP_aero}:8500"
        CONSUL_GRPC_ADDR = "${NOMAD_IP_aero}:8502"
        SERVICE_CONFIG =  "/config/web.json"
      }

      # Consul registration for "web" plus its Connect sidecar.
      # FIX: the upstream's destination_name must be a registered Consul
      # service. The api sidecars register "api-v1" / "api-v2" — there is
      # no service named "api" — so an upstream of "api" has no instances
      # and every call through 127.0.0.1:9091 is refused. Point it at
      # "api-v1" here; to split traffic across v1/v2, register both under
      # one service name or configure a service-resolver.
      # NOTE(review): "port": 9090 assumes the app is reachable on the
      # host at 9090, but "serviceport" is dynamically allocated — verify,
      # or use the mapped host port as the health-check URL already does.
      template {
        data = <<EOH
        {
          "service": {
            "name": "web",
            "id":"web-v1",
            "address": "{{env "NOMAD_IP_aero"}}",
            "port": 9090,
            "check": {
              "id": "web-v1",
              "name": "HTTP web-v1 on port 9090",
              "http": "http://{{env "NOMAD_ADDR_web_serviceport"}}/health",
              "interval": "10s",
              "timeout": "1s"
            },
            "connect": {
              "sidecar_service": {
                "port": 20000,
                "proxy": {
                  "upstreams": [
                    {
                      "destination_name": "api-v1",
                      "local_bind_address": "127.0.0.1",
                      "local_bind_port": 9091
                    }
                  ]
                }
              }
            }
          }
        }
        EOH

        destination = "local/web.json"
      }

      resources {
        network {
          mbits = 10
          port "aero" {}
        }
      }
    }
  }

  group "apiv1" {
    count = 1

    # fake-service backend advertising itself as "API V1".
    task "apiv1" {
      driver = "docker"
      config {
        image   = "nicholasjackson/fake-service:v0.3.5"
        force_pull = true
        port_map {
          serviceport = 9090
        }
      }

      env {
        LISTEN_ADDR = "0.0.0.0:9090"
        NAME = "api"
        MESSAGE = "API V1"
        SERVER_TYPE = "http"
      }

      resources {
        network {
            mbits = 10
            port "serviceport" {}
            port "aero" {}
        }
      }
    }

    # Envoy sidecar for api-v1: registers service "api-v1" with Consul
    # and starts `consul connect envoy`.
    task "apiv1-sidecar-proxy" {
      driver = "docker"
      config {
        image   = "nicholasjackson/consul-envoy:v1.6.0-v0.10.0"
        force_pull = true
        # NOTE(review): docker driver usually takes the binary in `command`
        # and arguments in `args`; confirm the image entrypoint splits this.
        command = "consul connect envoy -sidecar-for api-v1"
        port_map {
          serviceport = 9090
        }
        volumes = [
          "local/:/config"
        ]
      }

      env {
        CONSUL_HTTP_ADDR = "${NOMAD_IP_aero}:8500"
        CONSUL_GRPC_ADDR = "${NOMAD_IP_aero}:8502"
        SERVICE_CONFIG =  "/config/api_v1.json"
      }

      # FIX: every group is pinned to the same node, and the web sidecar
      # already claims sidecar port 20000; a second Envoy asking for 20000
      # on the same host cannot bind. Give api-v1's sidecar its own port.
      template {
        data = <<EOH
        {
          "service": {
            "name": "api-v1",
            "id":"api-v1",
            "address": "{{env "NOMAD_IP_aero"}}",
            "port": 9090,
            "check": {
              "id": "api-v1",
              "name": "HTTP api-v1 on port 9090",
              "http": "http://{{env "NOMAD_ADDR_apiv1_serviceport"}}/health",
              "interval": "10s",
              "timeout": "1s"
            },
            "connect": {
              "sidecar_service": {
                "port": 21000,
                "proxy": {}
              }
            }
          }
        }
        EOH

        destination = "local/api_v1.json"
      }

      resources {
        network {
          mbits = 10
          port "aero" {}
        }
      }
    }
  }

    group "apiv2" {
    count = 1

    # fake-service backend advertising itself as "API V2".
    task "apiv2" {
      driver = "docker"
      config {
        image   = "nicholasjackson/fake-service:v0.3.5"
        force_pull = true
        port_map {
          serviceport = 9090
        }
      }

      env {
        LISTEN_ADDR = "0.0.0.0:9090"
        NAME = "api"
        MESSAGE = "API V2"
        SERVER_TYPE = "http"
      }

      resources {
        network {
            mbits = 10
            port "serviceport" {}
            port "aero" {}
        }
      }
    }

    # Envoy sidecar for api-v2: registers service "api-v2" with Consul
    # and starts `consul connect envoy`.
    task "apiv2-sidecar-proxy" {
      driver = "docker"
      config {
        image   = "nicholasjackson/consul-envoy:v1.6.0-v0.10.0"
        force_pull = true
        # NOTE(review): docker driver usually takes the binary in `command`
        # and arguments in `args`; confirm the image entrypoint splits this.
        command = "consul connect envoy -sidecar-for api-v2"
        port_map {
          serviceport = 9090
        }
        volumes = [
          "local/:/config"
        ]
      }

      env {
        CONSUL_HTTP_ADDR = "${NOMAD_IP_aero}:8500"
        CONSUL_GRPC_ADDR = "${NOMAD_IP_aero}:8502"
        SERVICE_CONFIG =  "/config/api_v2.json"
      }

      # FIX: all three sidecars land on the same pinned node; web and
      # api-v1 already use sidecar ports there, so api-v2's sidecar needs
      # a port of its own instead of the shared 20000.
      template {
        data = <<EOH
        {
          "service": {
            "name": "api-v2",
            "id":"api-v2",
            "address": "{{env "NOMAD_IP_aero"}}",
            "port": 9090,
            "check": {
              "id": "api-v2",
              "name": "HTTP api-v2 on port 9090",
              "http": "http://{{env "NOMAD_ADDR_apiv2_serviceport"}}/health",
              "interval": "10s",
              "timeout": "1s"
            },
            "connect": {
              "sidecar_service": {
                "port": 22000,
                "proxy": {}
              }
            }
          }
        }
        EOH

        destination = "local/api_v2.json"
      }

      resources {
        network {
          mbits = 10
          port "aero" {}
        }
      }
    }
  }
}

Hey @crizstian

So I apologise — that error is not really an error. Fake Service has OpenTracing enabled; when no span can be created from the headers of an inbound request, that message is logged. If you have not configured a collector for DataDog or Zipkin then, for simplicity, I do not disable the tracing but just throw the data into /dev/null. The side effect is that every service call raises an error in the logs. This does not affect the actual operation, but it is noise. I have changed the level of this message to Debug, as it is a useful feature when setting up tracing. The log level can be controlled with the env var LOG_LEVEL.

I also apologise for the lateness of my reply; I have just made sure I am subscribed to the notifications in this repo.