dist: xenial
language: python
python: 2.7
branches:
  only:
    - master
cache:
  yarn: true
  directories:
    - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
    - "$NODE_DIR"
    - node_modules
    - "${HOME}/google-cloud-sdk"
addons:
  apt:
    update: true
    packages:
      - libxmlsec1-dev
      - libmaxminddb-dev
  chrome: stable
env:
  global:
    - NODE_ENV=development
    # PIP_VERSION causes issues because: https://github.com/pypa/pip/issues/4528
    # Note: this has to be synced with the pip version in the Makefile.
    - PYTHON_PIP_VERSION=19.2.3
    - PIP_USE_PEP517=off
    - PIP_DISABLE_PIP_VERSION_CHECK=on
    - PIP_QUIET=1
    - SENTRY_LIGHT_BUILD=1
    - SENTRY_SKIP_BACKEND_VALIDATION=1
    - MIGRATIONS_TEST_MIGRATE=0
    # Use this to override the django version in the requirements file.
    - DJANGO_VERSION=">=1.8,<1.9"
    # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
    - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
    - NODE_OPTIONS=--max-old-space-size=4096
before_install:
  - &pip_install pip install "pip==${PYTHON_PIP_VERSION}"
script:
  # certain commands require sentry init to be run, but this is only true for
  # running things within Travis
  - make travis-test-$TEST_SUITE
  - make travis-scan-$TEST_SUITE
  # installing dependencies for after_* steps here ensures they get cached
  # since those steps execute after travis runs `store build cache`
after_failure:
  - dmesg | tail -n 100
after_script:
  - |
    coverage_files=$(ls .artifacts/*coverage.xml || true)
    if [[ -n "$coverage_files" || -f .artifacts/coverage/cobertura-coverage.xml ]]; then
      pip install -U codecov
      codecov -e TEST_SUITE
    fi
  - ./bin/yarn global add @zeus-ci/cli
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  - $(./bin/yarn global bin)/zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  - $(./bin/yarn global bin)/zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json
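# `base_postgres` and `base_acceptance` are not Travis keys; they exist only
# to hold the &postgres_default / &acceptance_default anchors that matrix jobs
# merge in below via `<<:`.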
base_postgres: &postgres_default
  python: 2.7
  services:
    - memcached
    - redis-server
    - postgresql
  before_install:
    - *pip_install
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - python setup.py install_egg_info
    - pip install -U -e ".[dev]"
  before_script:
    - psql -c 'create database sentry;' -U postgres
base_acceptance: &acceptance_default
  python: 2.7
  services:
    - docker
    - memcached
    - redis-server
    - postgresql
  before_install:
    - *pip_install
    - find "$NODE_DIR" -type d -empty -delete
    - nvm install
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - ./bin/yarn install --pure-lockfile
    - python setup.py install_egg_info
    - pip install -U -e ".[dev]"
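    # fetch the chromedriver build that matches the major version of the
    # google-chrome-stable package installed by the `chrome: stable` addon,
    # so acceptance tests get a driver compatible with the installed browser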
    - |
      CHROME_MAJOR_VERSION="$(dpkg -s google-chrome-stable | sed -nr 's/Version: ([0-9]+).*/\1/p')"
      wget -N "https://chromedriver.storage.googleapis.com/$(curl https://chromedriver.storage.googleapis.com/LATEST_RELEASE_${CHROME_MAJOR_VERSION})/chromedriver_linux64.zip" -P ~/
    - unzip ~/chromedriver_linux64.zip -d ~/
    - rm ~/chromedriver_linux64.zip
    - sudo install -m755 ~/chromedriver /usr/local/bin/
  before_script:
    - psql -c 'create database sentry;' -U postgres
# each job in the matrix inherits `env/global` and uses everything above, but
# custom `services`, `before_install`, `install`, and `before_script` directives
# may be defined to set up individual job environments with more precision.
matrix:
  fast_finish: true
  include:
    # Lint python and javascript together
    - python: 2.7
      name: 'Linter'
      env: TEST_SUITE=lint
      install:
        - python setup.py install_egg_info
        - SENTRY_LIGHT_BUILD=1 pip install -U -e ".[dev]"
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - ./bin/yarn install --pure-lockfile
    # Proactive linting on 3.7 during the porting process
    - python: 3.7
      name: 'Linter (Python 3.7)'
      install: pip install 'sentry-flake8>=0.2.0,<0.3.0'
      # configuration for flake8 can be found in setup.cfg
      script: flake8
  128. - <<: *postgres_default
  129. name: 'Backend [Postgres] (1/2)'
  130. env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
  131. - <<: *postgres_default
  132. name: 'Backend [Postgres] (2/2)'
  133. env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1
  134. - <<: *postgres_default
  135. name: 'Backend with migrations [Postgres] (1/2)'
  136. env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0 MIGRATIONS_TEST_MIGRATE=1
  137. - <<: *postgres_default
  138. name: 'Backend with migrations [Postgres] (2/2)'
  139. env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1 MIGRATIONS_TEST_MIGRATE=1
  140. - <<: *acceptance_default
  141. name: 'Acceptance'
  142. env: TEST_SUITE=acceptance USE_SNUBA=1
    # allowed to fail
    - <<: *postgres_default
      name: 'Django 1.9 Backend [Postgres] (1/2)'
      env: DJANGO_VERSION=">=1.9,<1.10" TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    # allowed to fail
    - <<: *postgres_default
      name: 'Django 1.9 Backend [Postgres] (2/2)'
      env: DJANGO_VERSION=">=1.9,<1.10" TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1
    # allowed to fail
    - <<: *acceptance_default
      name: 'Django 1.9 Acceptance'
      env: DJANGO_VERSION=">=1.9,<1.10" TEST_SUITE=acceptance USE_SNUBA=1
    # allowed to fail
    - <<: *acceptance_default
      name: 'Plugins'
      env: TEST_SUITE=plugins DB=postgres PERCY_TOKEN=${PLUGIN_PERCY_TOKEN}
    # allowed to fail
    - <<: *acceptance_default
      name: 'Django 1.9 Plugins'
      env: DJANGO_VERSION=">=1.9,<1.10" TEST_SUITE=plugins DB=postgres PERCY_TOKEN=${PLUGIN_PERCY_TOKEN}
    - python: 2.7
      name: 'Frontend'
      env: TEST_SUITE=js
      before_install:
        - *pip_install
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
      install:
        - ./bin/yarn install --pure-lockfile
    - python: 2.7
      name: 'Command Line'
      env: TEST_SUITE=cli
      services:
        - postgresql
        - redis-server
      install:
        - python setup.py install_egg_info
        - pip install -U -e .
      before_script:
        - psql -c 'create database sentry;' -U postgres
    - python: 2.7
      name: 'Distribution build'
      env: TEST_SUITE=dist
      before_install:
        - *pip_install
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
  190. - <<: *postgres_default
  191. name: 'Symbolicator Integration'
  192. env: TEST_SUITE=symbolicator
  193. before_install:
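        # NB: defining before_install here *replaces* the list inherited from
        # *postgres_default (YAML merge keys override; they don't append), so
        # *pip_install does not run for this job.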
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker run -d --network host --name symbolicator us.gcr.io/sentryio/symbolicator:latest run
        - docker ps -a
    - python: 2.7
      name: 'Snuba Integration'
      env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
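      # Snuba needs Kafka (and Zookeeper for Kafka) plus ClickHouse; all four
      # run as host-networked containers on the ports referenced in `env` above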
      before_install:
        - *pip_install
        - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
        - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -U -e ".[dev]"
        - pip install confluent-kafka
      before_script:
        - psql -c 'create database sentry;' -U postgres
    # Deploy 'storybook' (component & style guide) - allowed to fail
    - name: 'Storybook Deploy'
      language: node_js
      env: STORYBOOK_BUILD=1
      before_install:
        # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
        - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
        # If the SDK is not already cached, download it and unpack it
        - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
        - tar -xzf credentials.tar.gz
        # Use the decrypted service account credentials to authenticate the command line tool
        - gcloud auth activate-service-account --key-file client-secret.json
      install:
        - ./bin/yarn install --pure-lockfile
        - gcloud version
      script: bash .travis/deploy-storybook.sh
      after_success: skip
      after_failure: skip
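  # allow_failures matches jobs by attribute (here, by name); keep this list
  # in sync with the `# allowed to fail` markers in `include` above.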
  allow_failures:
    - name: 'Storybook Deploy'
    - name: 'Django 1.9 Backend [Postgres] (1/2)'
    - name: 'Django 1.9 Backend [Postgres] (2/2)'
    - name: 'Django 1.9 Acceptance'
    - name: 'Plugins'
    - name: 'Django 1.9 Plugins'
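# Zeus (the same service the after_script artifacts are uploaded to) is
# notified of every build state transition.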
notifications:
  webhooks:
    urls:
      - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always