dist: xenial
language: python
python: 2.7

branches:
  only:
    - master

cache:
  yarn: true
  directories:
    - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
    - "$NODE_DIR"
    - node_modules
    - "${HOME}/google-cloud-sdk"

addons:
  apt:
    update: true
    packages:
      - libxmlsec1-dev
      - libmaxminddb-dev
  chrome: stable

env:
  global:
    - NODE_ENV=development
    # PIP_VERSION causes issues because: https://github.com/pypa/pip/issues/4528
    # Note: this has to be synced with the pip version in the Makefile.
    - PYTHON_PIP_VERSION=19.2.3
    - PIP_USE_PEP517=off
    - PIP_DISABLE_PIP_VERSION_CHECK=on
    - PIP_QUIET=1
    - SENTRY_LIGHT_BUILD=1
    - SENTRY_SKIP_BACKEND_VALIDATION=1
    - MIGRATIONS_TEST_MIGRATE=0
    # Use this to override the django version in the requirements file.
    - DJANGO_VERSION=">=1.9,<1.10"
    # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
    - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
    - NODE_OPTIONS=--max-old-space-size=4096

# &pip_install is aliased by the job templates below so every job pins the
# same pip version before installing anything else.
before_install:
  - &pip_install pip install "pip==${PYTHON_PIP_VERSION}"

script:
  # certain commands require sentry init to be run, but this is only true for
  # running things within Travis
  - make travis-test-$TEST_SUITE
  - make travis-scan-$TEST_SUITE
  # installing dependencies for after_* steps here ensures they get cached
  # since those steps execute after travis runs `store build cache`

after_failure:
  - dmesg | tail -n 100

after_script:
  - |
    coverage_files=$(ls .artifacts/*coverage.xml || true)
    if [[ -n "$coverage_files" || -f .artifacts/coverage/cobertura-coverage.xml ]]; then
      pip install -U codecov
      codecov -e TEST_SUITE
    fi
  - ./bin/yarn global add @zeus-ci/cli
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  - $(./bin/yarn global bin)/zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  - $(./bin/yarn global bin)/zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json

# Job template for Postgres-backed test suites; merged into matrix entries
# below via `<<: *postgres_default`.
base_postgres: &postgres_default
  python: 2.7
  services:
    - memcached
    - redis-server
    - postgresql
  before_install:
    - *pip_install
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.11
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - python setup.py install_egg_info
    - pip install -U -e ".[dev]"
  before_script:
    - psql -c 'create database sentry;' -U postgres

# Job template for browser acceptance suites; adds node, chromedriver and the
# docker service on top of the Postgres stack.
base_acceptance: &acceptance_default
  python: 2.7
  services:
    - docker
    - memcached
    - redis-server
    - postgresql
  before_install:
    - *pip_install
    - find "$NODE_DIR" -type d -empty -delete
    - nvm install
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.11
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - ./bin/yarn install --pure-lockfile
    - python setup.py install_egg_info
    - pip install -U -e ".[dev]"
    - |
      CHROME_MAJOR_VERSION="$(dpkg -s google-chrome-stable | sed -nr 's/Version: ([0-9]+).*/\1/p')"
      wget -N "https://chromedriver.storage.googleapis.com/$(curl https://chromedriver.storage.googleapis.com/LATEST_RELEASE_${CHROME_MAJOR_VERSION})/chromedriver_linux64.zip" -P ~/
    - unzip ~/chromedriver_linux64.zip -d ~/
    - rm ~/chromedriver_linux64.zip
    - sudo install -m755 ~/chromedriver /usr/local/bin/
  before_script:
    - psql -c 'create database sentry;' -U postgres

# each job in the matrix inherits `env/global` and uses everything above,
# but custom `services`, `before_install`, `install`, and `before_script` directives
# may be defined to define and setup individual job environments with more precision.
matrix:
  fast_finish: true
  include:
    # Lint python and javascript together
    - python: 2.7
      name: 'Linter'
      env: TEST_SUITE=lint
      install:
        - python setup.py install_egg_info
        - SENTRY_LIGHT_BUILD=1 pip install -U -e ".[dev]"
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - ./bin/yarn install --pure-lockfile

    # Proactive linting on 3.7 during the porting process
    - python: 3.7
      name: 'Linter (Python 3.7)'
      install: pip install 'sentry-flake8>=0.2.0,<0.3.0'
      # configuration for flake8 can be found in setup.cfg
      script: flake8

    - <<: *postgres_default
      name: 'Backend [Postgres] (1/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    - <<: *postgres_default
      name: 'Backend [Postgres] (2/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1
    - <<: *postgres_default
      name: 'Backend with migrations [Postgres] (1/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0 MIGRATIONS_TEST_MIGRATE=1
    - <<: *postgres_default
      name: 'Backend with migrations [Postgres] (2/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1 MIGRATIONS_TEST_MIGRATE=1

    - <<: *acceptance_default
      name: 'Acceptance'
      env: TEST_SUITE=acceptance USE_SNUBA=1

    # allowed to fail
    - <<: *postgres_default
      name: '[Django 1.10] Backend [Postgres] (1/2)'
      env: DJANGO_VERSION=">=1.10,<1.11" TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    # allowed to fail
    - <<: *postgres_default
      name: '[Django 1.10] Backend [Postgres] (2/2)'
      env: DJANGO_VERSION=">=1.10,<1.11" TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1
    # allowed to fail
    - <<: *acceptance_default
      name: '[Django 1.10] Acceptance'
      env: DJANGO_VERSION=">=1.10,<1.11" TEST_SUITE=acceptance USE_SNUBA=1 PERCY_ENABLE=0

    # allowed to fail
    - <<: *acceptance_default
      name: 'Plugins'
      env: TEST_SUITE=plugins DB=postgres PERCY_TOKEN=${PLUGIN_PERCY_TOKEN}

    - python: 2.7
      name: 'Frontend'
      env: TEST_SUITE=js
      before_install:
        - *pip_install
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
      install:
        - ./bin/yarn install --pure-lockfile

    - python: 2.7
      name: 'Command Line'
      env: TEST_SUITE=cli
      services:
        - postgresql
        - redis-server
      install:
        - python setup.py install_egg_info
        - pip install -U -e .
      before_script:
        - psql -c 'create database sentry;' -U postgres

    - python: 2.7
      name: 'Distribution build'
      env: TEST_SUITE=dist
      before_install:
        - *pip_install
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install

    - <<: *postgres_default
      name: 'Symbolicator Integration'
      env: TEST_SUITE=symbolicator
      # NOTE(review): this override drops the *pip_install step inherited from
      # the template's before_install — presumably intentional; confirm.
      before_install:
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.11
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker run -d --network host --name symbolicator us.gcr.io/sentryio/symbolicator:latest run
        - docker ps -a

    - python: 2.7
      name: 'Snuba Integration'
      env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
        - *pip_install
        - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
        - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.11
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -U -e ".[dev]"
        - pip install confluent-kafka
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # allowed to fail
    - python: 2.7
      name: '[Django 1.10] Snuba Integration'
      env: DJANGO_VERSION=">=1.10,<1.11" TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
        - *pip_install
        - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
        - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.11
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -U -e ".[dev]"
        - pip install confluent-kafka
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # Deploy 'storybook' (component & style guide) - allowed to fail
    - name: 'Storybook Deploy'
      language: node_js
      env: STORYBOOK_BUILD=1
      before_install:
        # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
        - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
        # If the SDK is not already cached, download it and unpack it
        - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
        - tar -xzf credentials.tar.gz
        # Use the decrypted service account credentials to authenticate the command line tool
        - gcloud auth activate-service-account --key-file client-secret.json
      install:
        - ./bin/yarn install --pure-lockfile
        - gcloud version
      script: bash .travis/deploy-storybook.sh
      after_success: skip
      after_failure: skip

  # Jobs listed here may fail without failing the build; matched by `name`.
  allow_failures:
    - name: 'Storybook Deploy'
    - name: '[Django 1.10] Backend [Postgres] (1/2)'
    - name: '[Django 1.10] Backend [Postgres] (2/2)'
    - name: '[Django 1.10] Acceptance'
    - name: '[Django 1.10] Snuba Integration'

notifications:
  webhooks:
    urls:
      - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always