mesosphere/universe

Building `spark-history` via the local-universe `--include` option fails

wallnerryan opened this issue · 2 comments

It fails with a `resource.json: no such file or directory` error, and the entire build of `local-universe.tar.gz` aborts:

Traceback (most recent call last):
  File "/root/universe/docker/local-universe/../../scripts/local-universe.py", line 413, in <module>
    sys.exit(main())
  File "/root/universe/docker/local-universe/../../scripts/local-universe.py", line 135, in main
    dcos_version)):
  File "/usr/lib64/python3.4/concurrent/futures/_base.py", line 549, in result_iterator
    yield future.result()
  File "/usr/lib64/python3.4/concurrent/futures/_base.py", line 395, in result
    return self.__get_result()
  File "/usr/lib64/python3.4/concurrent/futures/_base.py", line 354, in __get_result
    raise self._exception
  File "/usr/lib64/python3.4/concurrent/futures/thread.py", line 54, in run
    result = self.fn(*self.args, **self.kwargs)
  File "/root/universe/docker/local-universe/../../scripts/local-universe.py", line 107, in handle_package
    args.nonlocal_cli
  File "/root/universe/docker/local-universe/../../scripts/local-universe.py", line 334, in prepare_repository
    with source_resource.open(encoding='utf-8') as source_file, \
  File "/usr/lib64/python3.4/pathlib.py", line 1086, in open
    opener=self._opener)
  File "/usr/lib64/python3.4/pathlib.py", line 960, in _opener
    return self._accessor.open(self, flags, mode)
  File "/usr/lib64/python3.4/pathlib.py", line 326, in wrapped
    return strfunc(str(pathobj), *args)
FileNotFoundError: [Errno 2] No such file or directory: '/root/universe/docker/local-universe/../../repo/packages/S/spark-history/0/resource.json'
make: *** [local-universe] Error 1
[root@universe-builder local-universe]# cat Makefile
# Root of the universe repo checkout (two directories up from this Makefile).
REPO_BASE_DIR ?= $(shell pwd -P)/../..
static_version ?= 0.2-2
static_image ?= universe-static:$(static_version)

# Every target in this Makefile produces side effects rather than a file
# named after the target, so all of them must be declared phony — otherwise
# a stray file named e.g. "local-universe" would silently skip the build.
.PHONY: certs base clean static-build static-online static-base local-universe

# Generate a self-signed TLS certificate for CN=master.mesos, valid 365 days.
# "mkdir -p" makes the target idempotent: a plain "mkdir" errors out when
# certs/ is left over from a previous or interrupted run.
certs:
	mkdir -p certs && openssl req					\
		-newkey rsa:4096 -nodes -sha256 -keyout certs/domain.key	\
		-x509 -days 365 -out certs/domain.crt			\
		-subj "/CN=master.mesos"

# Build the universe-base image from scratch: wipe old artifacts, mint a
# fresh certificate, then run the docker build.
base: clean certs
	docker build -f Dockerfile.base -t universe-base .

# Remove the generated certs directory and the packaged universe tarball.
# The old recipe ended in "|| 0", which tries to execute a command literally
# named "0" — guaranteed to fail precisely when rm returns non-zero and the
# fallback is needed. rm -rf / rm -f already succeed on missing paths, so
# each removal can simply run on its own line.
clean:
	rm -rf certs
	rm -f local-universe.tar.gz

# Build the static-serving image locally from Dockerfile.static.
static-build:
	docker build -f Dockerfile.static -t $(static_image) .

# Fetch the prebuilt static image from the mesosphere registry namespace
# and retag it under the local name expected by the other targets.
static-online:
	docker pull mesosphere/$(static_image) \
		&& docker tag mesosphere/$(static_image) $(static_image)

# Build the universe-base image variant backed by the static image.
static-base:
	docker build -f Dockerfile.static.base -t universe-base .

# Build the on-prem "local universe": local-universe.py bakes the packages
# listed in --include into the mesosphere/universe:latest image, which is
# then exported with "docker save" and gzipped into local-universe.tar.gz.
# NOTE(review): every name in --include must have a complete package
# definition — including resource.json — under $(REPO_BASE_DIR)/repo/packages/.
# A single missing file (e.g. spark-history's resource.json, as in this
# issue's traceback) makes the script raise and aborts the whole build.
local-universe: clean
	python3 $(REPO_BASE_DIR)/scripts/local-universe.py			\
		--repository $(REPO_BASE_DIR)/repo/packages/	\
		--include="marathon-lb,jenkins,cassandra,spark,spark-history,beta-confluent-kafka,confluent-kafka,beta-kafka,chronos,etcd,grafana,influxdb,kafka,kibana,mariadb,postgres,mongodb,mysql,neo4j,neo4jreplica,redis" &&							\
	docker save -o local-universe.tar mesosphere/universe:latest &&		\
	gzip local-universe.tar

I have the same issue — the build fails for `spark-history`. Is there any fix or workaround for this?

For the time being, we packaged `spark-history` ourselves and serve it to the cluster via Artifactory.