# See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for
# high-level documentation on our CircleCI setup.
# See CircleCI upstream's docs on this config format:
#   https://circleci.com/docs/2.0/language-python/
#
version: 2.0
aliases:
  - &create_cache_directories
    run:
      name: create cache directories
      command: |
        dirs=(/srv/zulip-{npm,venv,emoji}-cache)
        sudo mkdir -p "${dirs[@]}"
        sudo chown -R circleci "${dirs[@]}"

  - &restore_cache_package_json
    restore_cache:
      keys:
        - v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}

  - &restore_cache_requirements
    restore_cache:
      keys:
        - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}

  - &restore_emoji_cache
    restore_cache:
      keys:
        - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{ checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}

  - &install_dependencies
    run:
      name: install dependencies
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils

        # CircleCI sets the following in Git config at clone time:
        #   url.ssh://git@github.com.insteadOf https://github.com
        # This breaks the Git clones in the NVM `install.sh` we run
        # in `install-node`.
        # TODO: figure out why that breaks, and whether we want it.
        #   (Is it an optimization?)
        rm -f /home/circleci/.gitconfig

        # This is the main setup job for the test suite
        mispipe "tools/ci/setup-backend" ts

        # Cleaning caches is mostly unnecessary in Circle, because
        # most builds don't get to write to the cache.
        # mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts

  - &save_cache_package_json
    save_cache:
      paths:
        - /srv/zulip-npm-cache
      key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}

  - &save_cache_requirements
    save_cache:
      paths:
        - /srv/zulip-venv-cache
      key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}

  - &save_emoji_cache
    save_cache:
      paths:
        - /srv/zulip-emoji-cache
      key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{ checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}

  - &do_bionic_hack
    run:
      name: do Bionic hack
      command: |
        # Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
        # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
        sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf

  - &run_backend_tests
    run:
      name: run backend tests
      command: |
        . /srv/zulip-py3-venv/bin/activate
        mispipe ./tools/ci/backend ts

  - &run_frontend_tests
    run:
      name: run frontend tests
      command: |
        . /srv/zulip-py3-venv/bin/activate
        mispipe ./tools/ci/frontend ts

  - &upload_coverage_report
    run:
      name: upload coverage report
      command: |
        . /srv/zulip-py3-venv/bin/activate
        pip install codecov && codecov \
          || echo "Error in uploading coverage reports to codecov.io."

  - &build_production
    run:
      name: build production
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils

        mispipe ./tools/ci/production-build ts

  - &production_extract_tarball
    run:
      name: production extract tarball
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils

        mispipe /tmp/production-extract-tarball ts

  - &install_production
    run:
      name: install production
      command: |
        sudo service rabbitmq-server restart
        sudo mispipe /tmp/production-install ts

  - &check_xenial_provision_error
    run:
      name: check tools/provision error message on xenial
      command: |
        ! tools/provision > >(tee provision.out)
        grep -Fqx 'CRITICAL:root:Unsupported platform: ubuntu 16.04' provision.out

  - &check_xenial_upgrade_error
    run:
      name: check scripts/lib/upgrade-zulip-stage-2 error message on xenial
      command: |
        ! sudo scripts/lib/upgrade-zulip-stage-2 2> >(tee upgrade.err >&2)
        grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err

  - &check_xenial_puppet
    run:
      name: check scripts/zulip-puppet-apply on xenial
      command: |
        sudo apt-get update
        sudo apt-get -qy install lsb-release
        sudo mkdir /etc/zulip
        printf '[machine]\npuppet_classes = zulip::base, zulip::postgres_appdb_tuned\n' | sudo tee /etc/zulip/zulip.conf
        sudo scripts/zulip-puppet-apply -f
        sudo python3 -m py_compile /usr/local/bin/pg_backup_and_purge /usr/local/bin/process_fts_updates

jobs:
  "bionic-backend-frontend-python3.6":
    docker:
      # This is built from tools/circleci/images/bionic/Dockerfile.
      # Bionic ships with Python 3.6.
      - image: amanagr/circleci:bionic-python-2.test

    working_directory: ~/zulip

    steps:
      - checkout

      - *create_cache_directories
      - *do_bionic_hack
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *restore_emoji_cache
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *save_emoji_cache
      - *run_backend_tests

      - run:
          name: test locked requirements
          command: |
            . /srv/zulip-py3-venv/bin/activate
            mispipe ./tools/test-locked-requirements ts

      - *run_frontend_tests
      # We only need to upload coverage reports on whichever platform
      # runs the frontend tests.
      - *upload_coverage_report

      - store_artifacts:
          path: ./var/casper/
          destination: casper

      - store_artifacts:
          path: ../../../tmp/zulip-test-event-log/
          destination: test-reports

      - store_test_results:
          path: ./var/xunit-test-results/casper/

  "focal-backend-python3.8":
    docker:
      # This is built from tools/circleci/images/focal/Dockerfile.
      # Focal ships with Python 3.8.2.
      - image: arpit551/circleci:focal-test

    working_directory: ~/zulip

    steps:
      - checkout

      - *create_cache_directories
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *restore_emoji_cache
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *save_emoji_cache
      - *run_backend_tests

  "xenial-legacy":
    docker:
      - image: gregprice/circleci:xenial-python-4.test

    working_directory: ~/zulip

    steps:
      - checkout
      - *check_xenial_provision_error
      - *check_xenial_upgrade_error
      - *check_xenial_puppet

  "bionic-production-build-python3.6":
    docker:
      # This is built from tools/circleci/images/bionic/Dockerfile.
      # Bionic ships with Python 3.6.
      - image: amanagr/circleci:bionic-python-2.test

    working_directory: ~/zulip

    steps:
      - checkout

      - *create_cache_directories
      - *do_bionic_hack
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *restore_emoji_cache
      - *build_production
      - *save_cache_package_json
      - *save_cache_requirements
      - *save_emoji_cache

      # Persist the built tarball to be used in downstream job
      # for installation of production server.
      # See https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
      - persist_to_workspace:
          # Must be an absolute path,
          # or relative path from working_directory.
          # This is a directory on the container which is
          # taken to be the root directory of the workspace.
          root: /tmp
          # Must be relative path from root
          paths:
            - zulip-server-test.tar.gz
            - success-http-headers.txt
            - production-install
            - production
            - production-extract-tarball

  "bionic-production-install-python3.6":
    docker:
      # This is built from tools/circleci/images/bionic/Dockerfile.
      # Bionic ships with Python 3.6.
      - image: amanagr/circleci:bionic-python-2.test

    working_directory: ~/zulip

    steps:
      # Contains the built tarball from bionic-production-build-python3.6 job
      - attach_workspace:
          # Must be absolute path or relative path from working_directory
          at: /tmp

      - *create_cache_directories
      - *do_bionic_hack
      - *production_extract_tarball
      - *restore_cache_package_json
      - *install_production
      - *save_cache_package_json

  "focal-production-install-python3.8":
    docker:
      # This is built from tools/circleci/images/focal/Dockerfile.
      # Focal ships with Python 3.8.2.
      - image: arpit551/circleci:focal-test

    working_directory: ~/zulip

    steps:
      # Contains the built tarball from bionic-production-build-python3.6 job
      - attach_workspace:
          # Must be absolute path or relative path from working_directory
          at: /tmp

      - *create_cache_directories
      - *do_bionic_hack
      - *production_extract_tarball
      - *restore_cache_package_json
      - *install_production
      - *save_cache_package_json

workflows:
  version: 2
  build:
    jobs:
      - "bionic-backend-frontend-python3.6"
      - "bionic-production-build-python3.6"
      - "focal-backend-python3.8"
      - "xenial-legacy"
      - "bionic-production-install-python3.6":
          requires:
            - "bionic-production-build-python3.6"
      # - "focal-production-install-python3.8":
      #     requires:
      #       - "bionic-production-build-python3.6"