# See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for
# high-level documentation on our CircleCI setup.
# See CircleCI upstream's docs on this config format:
# https://circleci.com/docs/2.0/language-python/
#
version: 2.0
aliases:
  - &create_cache_directories
    run:
      name: create cache directories
      command: |
        dirs=(/srv/zulip-{npm,venv,emoji}-cache)
        sudo mkdir -p "${dirs[@]}"
        sudo chown -R circleci "${dirs[@]}"

  - &restore_cache_package_json
    restore_cache:
      keys:
        - v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}

  - &restore_cache_requirements
    restore_cache:
      keys:
        - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}

  - &restore_emoji_cache
    restore_cache:
      keys:
        - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{ checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}

  - &install_dependencies
    run:
      name: install dependencies
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils
        # CircleCI sets the following in Git config at clone time:
        # url.ssh://git@github.com.insteadOf https://github.com
        # This breaks the Git clones in the NVM `install.sh` we run
        # in `install-node`.
        # TODO: figure out why that breaks, and whether we want it.
        # (Is it an optimization?)
        rm -f /home/circleci/.gitconfig
        # This is the main setup job for the test suite
        mispipe "tools/ci/setup-backend --skip-dev-db-build" ts
        # Cleaning caches is mostly unnecessary in Circle, because
        # most builds don't get to write to the cache.
        # mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0 2>&1" ts

  - &save_cache_package_json
    save_cache:
      paths:
        - /srv/zulip-npm-cache
      key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}

  - &save_cache_requirements
    save_cache:
      paths:
        - /srv/zulip-venv-cache
      key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}

  - &save_emoji_cache
    save_cache:
      paths:
        - /srv/zulip-emoji-cache
      key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{ checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}

  - &do_bionic_hack
    run:
      name: do Bionic hack
      command: |
        # Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See
        # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
        sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf

  - &run_backend_tests
    run:
      name: run backend tests
      command: |
        . /srv/zulip-py3-venv/bin/activate
        mispipe "./tools/ci/backend 2>&1" ts

  - &run_frontend_tests
    run:
      name: run frontend tests
      command: |
        . /srv/zulip-py3-venv/bin/activate
        mispipe "./tools/ci/frontend 2>&1" ts

  - &upload_coverage_report
    run:
      name: upload coverage report
      command: |
        # codecov requires `.coverage` file to be stored in pwd for
        # uploading coverage results.
        mv /home/circleci/zulip/var/.coverage /home/circleci/zulip/.coverage
        . /srv/zulip-py3-venv/bin/activate
        # TODO: Check that the next release of codecov doesn't
        # throw find error.
        # codecov==2.0.16 introduced a bug: it uses "find" to locate files,
        # which is buggy on some platforms.
        # It was fixed via https://github.com/codecov/codecov-python/pull/217
        # and should get automatically fixed here once it's released.
        # We cannot pin the version here because we need the latest version for uploading files.
        # see https://community.codecov.io/t/http-400-while-uploading-to-s3-with-python-codecov-from-travis/1428/7
        pip install codecov && codecov \
          || echo "Error in uploading coverage reports to codecov.io."

  - &build_production
    run:
      name: build production
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils
        mispipe "./tools/ci/production-build 2>&1" ts

  - &production_extract_tarball
    run:
      name: production extract tarball
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils
        mispipe "/tmp/production-extract-tarball 2>&1" ts

  - &install_production
    run:
      name: install production
      command: |
        sudo service rabbitmq-server restart
        sudo --preserve-env=CIRCLECI mispipe "/tmp/production-install 2>&1" ts

  - &verify_production
    run:
      name: verify install
      command: |
        sudo --preserve-env=CIRCLECI mispipe "/tmp/production-verify 2>&1" ts

  - &upgrade_postgresql
    run:
      name: upgrade postgresql
      command: |
        sudo --preserve-env=CIRCLECI mispipe "/tmp/production-upgrade-pg 2>&1" ts

  - &check_xenial_provision_error
    run:
      name: check tools/provision error message on xenial
      command: |
        ! tools/provision > >(tee provision.out)
        grep -Fqx 'CRITICAL:root:Unsupported platform: ubuntu 16.04' provision.out

  - &check_xenial_upgrade_error
    run:
      name: check scripts/lib/upgrade-zulip-stage-2 error message on xenial
      command: |
        ! sudo scripts/lib/upgrade-zulip-stage-2 2> >(tee upgrade.err >&2)
        grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err

  - &notify_failure_status
    run:
      name: On fail
      when: on_fail
      branches:
        only: master
      command: |
        if [[ "$CIRCLE_REPOSITORY_URL" == "git@github.com:zulip/zulip.git" && "$ZULIP_BOT_KEY" != "" ]]; then
          curl -H "Content-Type: application/json" \
            -X POST -i 'https://chat.zulip.org/api/v1/external/circleci?api_key='"$ZULIP_BOT_KEY"'&stream=automated%20testing&topic=master%20failing' \
            -d '{"payload": { "branch": "'"$CIRCLE_BRANCH"'", "reponame": "'"$CIRCLE_PROJECT_REPONAME"'", "status": "failed", "build_url": "'"$CIRCLE_BUILD_URL"'", "username": "'"$CIRCLE_USERNAME"'"}}'
        fi

jobs:
  "bionic-backend-frontend":
    docker:
      # This is built from tools/ci/images/bionic/Dockerfile.
      # Bionic ships with Python 3.6.
      - image: arpit551/circleci:bionic-python-test

    working_directory: ~/zulip

    steps:
      - checkout
      - *create_cache_directories
      - *do_bionic_hack
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *restore_emoji_cache
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *save_emoji_cache
      - *run_backend_tests
      - run:
          name: test locked requirements
          command: |
            . /srv/zulip-py3-venv/bin/activate
            mispipe "./tools/test-locked-requirements 2>&1" ts
      - *run_frontend_tests
      # We only need to upload coverage reports on whichever platform
      # runs the frontend tests.
      - *upload_coverage_report
      - store_artifacts:
          path: ./var/casper/
          destination: casper
      - store_artifacts:
          path: ./var/puppeteer/
          destination: puppeteer
      - store_test_results:
          path: ./var/xunit-test-results/casper/
      - *notify_failure_status

  "focal-backend":
    docker:
      # This is built from tools/ci/images/focal/Dockerfile.
      # Focal ships with Python 3.8.2.
      - image: arpit551/circleci:focal-python-test

    working_directory: ~/zulip

    steps:
      - checkout
      - *create_cache_directories
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *restore_emoji_cache
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *save_emoji_cache
      - *run_backend_tests
      - run:
          name: Check development database build
          command: mispipe "tools/ci/setup-backend" ts
      - *notify_failure_status

  "xenial-legacy":
    docker:
      - image: arpit551/circleci:xenial-python-test

    working_directory: ~/zulip

    steps:
      - checkout
      - *check_xenial_provision_error
      - *check_xenial_upgrade_error
      - *notify_failure_status

  "bionic-production-build":
    docker:
      # This is built from tools/ci/images/bionic/Dockerfile.
      # Bionic ships with Python 3.6.
      - image: arpit551/circleci:bionic-python-test

    working_directory: ~/zulip

    steps:
      - checkout
      - *create_cache_directories
      - *do_bionic_hack
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *restore_emoji_cache
      - *build_production
      - *save_cache_package_json
      - *save_cache_requirements
      - *save_emoji_cache
      # Persist the built tarball to be used in the downstream job
      # for installation of the production server.
      # See https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
      - persist_to_workspace:
          # Must be an absolute path,
          # or relative path from working_directory.
          # This is a directory on the container which is
          # taken to be the root directory of the workspace.
          root: /tmp/production-build
          # Must be relative path from root
          paths:
            - "*"
      - *notify_failure_status

  "bionic-production-install":
    docker:
      # This is built from tools/ci/images/bionic/Dockerfile.
      # Bionic ships with Python 3.6.
      - image: arpit551/circleci:bionic-python-test

    working_directory: ~/zulip

    steps:
      # Contains the built tarball from the bionic-production-build job
      - attach_workspace:
          # Must be absolute path or relative path from working_directory
          at: /tmp
      - *create_cache_directories
      - *do_bionic_hack
      - *production_extract_tarball
      - *restore_cache_package_json
      - *install_production
      - *verify_production
      - *upgrade_postgresql
      - *verify_production
      - *save_cache_package_json
      - *notify_failure_status

  "focal-production-install":
    docker:
      # This is built from tools/ci/images/focal/Dockerfile.
      # Focal ships with Python 3.8.2.
      - image: arpit551/circleci:focal-python-test

    working_directory: ~/zulip

    steps:
      # Contains the built tarball from the bionic-production-build job
      - attach_workspace:
          # Must be absolute path or relative path from working_directory
          at: /tmp
      - *create_cache_directories
      - run:
          name: do memcached hack
          command: |
            # Temporary hack till memcached upstream is updated in Focal.
            # https://bugs.launchpad.net/ubuntu/+source/memcached/+bug/1878721
            echo "export SASL_CONF_PATH=/etc/sasl2" | sudo tee -a /etc/default/memcached
      - *production_extract_tarball
      - *restore_cache_package_json
      - *install_production
      - *verify_production
      - *save_cache_package_json
      - *notify_failure_status

workflows:
  version: 2
  "Ubuntu 16.04 Xenial (Python 3.5, legacy)":
    jobs:
      - "xenial-legacy"
  "Ubuntu 18.04 Bionic (Python 3.6, backend+frontend)":
    jobs:
      - "bionic-backend-frontend"
  "Ubuntu 20.04 Focal (Python 3.8, backend)":
    jobs:
      - "focal-backend"
  "Production":
    jobs:
      - "bionic-production-build"
      - "bionic-production-install":
          requires:
            - "bionic-production-build"
      - "focal-production-install":
          requires:
            - "bionic-production-build"