# Travis CI configuration (.travis.yml) for getsentry/sentry.
# NOTE(review): the pasted source carried a web-view header and per-line ordinal
# prefixes with flattened indentation; structure restored below.
  1. dist: trusty
  2. group: deprecated-2017Q4
  3. language: python
  4. python: 2.7
  5. branches:
  6. only:
  7. - master
  8. cache:
  9. yarn: true
  10. directories:
  11. - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
  12. - "$NODE_DIR"
  13. - node_modules
  14. - "${HOME}/google-cloud-sdk"
  15. addons:
  16. apt:
  17. update: true
  18. packages:
  19. - libxmlsec1-dev
  20. - libgeoip-dev
  21. chrome: stable
  22. env:
  23. global:
  24. - NODE_ENV=development
  25. - PIP_DISABLE_PIP_VERSION_CHECK=on
  26. - PIP_QUIET=1
  27. - SENTRY_LIGHT_BUILD=1
  28. - SENTRY_SKIP_BACKEND_VALIDATION=1
  29. - SOUTH_TESTS_MIGRATE=0
  30. - DJANGO_VERSION=">=1.8,<1.9"
  31. # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
  32. - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
  33. - NODE_OPTIONS=--max-old-space-size=4096
  34. script:
  35. # certain commands require sentry init to be run, but this is only true for
  36. # running things within Travis
  37. - make travis-test-$TEST_SUITE
  38. - make travis-scan-$TEST_SUITE
  39. # installing dependencies for after_* steps here ensures they get cached
  40. # since those steps execute after travis runs `store build cache`
  41. after_failure:
  42. - dmesg | tail -n 100
  43. after_script:
  44. - |
  45. coverage_files=$(ls .artifacts/*coverage.xml || true)
  46. if [[ -n "$coverage_files" || -f .artifacts/coverage/cobertura-coverage.xml ]]; then
  47. pip install -U codecov
  48. codecov -e TEST_SUITE
  49. fi
  50. - ./bin/yarn global add @zeus-ci/cli
  51. - zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  52. - zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  53. - zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  54. - zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  55. - zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  56. - zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  57. - zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json
  58. # Trigger a build for the `git` image on docker-sentry
  59. - >
  60. if [ "$TRAVIS_PULL_REQUEST" == "false" -a "$TRAVIS_BRANCH" == "master" ]; then
  61. curl -s -X POST
  62. -H "Content-Type: application/json"
  63. -H "Accept: application/json"
  64. -H "Travis-API-Version: 3"
  65. -H "Authorization: token $TRAVIS_TOKEN"
  66. -d '{
  67. "request": {
  68. "branch": "master",
  69. "config": {
  70. "env": {
  71. "matrix": [
  72. "VERSION=git"
  73. ]
  74. }
  75. },
  76. "message": "Build for getsentry/sentry@'"$TRAVIS_COMMIT"'"
  77. }
  78. }'
  79. https://api.travis-ci.org/repo/getsentry%2Fdocker-sentry/requests
  80. base_postgres: &postgres_default
  81. python: 2.7
  82. services:
  83. - memcached
  84. - redis-server
  85. - postgresql
  86. install:
  87. - python setup.py install_egg_info
  88. - pip install -U -e ".[dev,tests,optional]"
  89. before_script:
  90. - psql -c 'create database sentry;' -U postgres
  91. before_install:
  92. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  93. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  94. - docker ps -a
  95. base_acceptance: &acceptance_default
  96. python: 2.7
  97. services:
  98. - docker
  99. - memcached
  100. - redis-server
  101. - postgresql
  102. before_install:
  103. - find "$NODE_DIR" -type d -empty -delete
  104. - nvm install
  105. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  106. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  107. - docker ps -a
  108. install:
  109. - ./bin/yarn install --pure-lockfile
  110. - python setup.py install_egg_info
  111. - pip install -U -e ".[dev,tests,optional]"
  112. - wget -N "https://chromedriver.storage.googleapis.com/$(curl https://chromedriver.storage.googleapis.com/LATEST_RELEASE_74)/chromedriver_linux64.zip" -P ~/
  113. - unzip ~/chromedriver_linux64.zip -d ~/
  114. - rm ~/chromedriver_linux64.zip
  115. - sudo install -m755 ~/chromedriver /usr/local/bin/
  116. before_script:
  117. - psql -c 'create database sentry;' -U postgres
  118. # each job in the matrix inherits `env/global` and uses everything above,
  119. # but custom `services`, `before_install`, `install`, and `before_script` directives
  120. # may be defined to define and setup individual job environments with more precision.
  121. matrix:
  122. fast_finish: true
  123. include:
  124. # Lint python and javascript together
  125. - python: 2.7
  126. name: 'Linter'
  127. env: TEST_SUITE=lint
  128. install:
  129. - SENTRY_LIGHT_BUILD=1 pip install -U -e ".[dev,tests,optional]"
  130. - find "$NODE_DIR" -type d -empty -delete
  131. - nvm install
  132. - ./bin/yarn install --pure-lockfile
  133. - <<: *postgres_default
  134. name: 'Backend [Postgres] (1/2)'
  135. env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
  136. - <<: *postgres_default
  137. name: 'Backend [Postgres] (2/2)'
  138. env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1
  139. - python: 2.7
  140. name: 'Backend [Riak]'
  141. env: TEST_SUITE=riak DB=postgres
  142. services:
  143. - memcached
  144. - redis-server
  145. - postgresql
  146. - riak
  147. install:
  148. - python setup.py install_egg_info
  149. - pip install -U -e ".[dev,tests,optional]"
  150. before_script:
  151. - psql -c 'create database sentry;' -U postgres
  152. before_install:
  153. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  154. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  155. - docker ps -a
  156. - <<: *acceptance_default
  157. name: 'Acceptance'
  158. env: TEST_SUITE=acceptance USE_SNUBA=1
  159. - python: 2.7
  160. name: 'Frontend'
  161. env: TEST_SUITE=js
  162. before_install:
  163. - find "$NODE_DIR" -type d -empty -delete
  164. - nvm install
  165. install:
  166. - ./bin/yarn install --pure-lockfile
  167. - python: 2.7
  168. name: 'Command Line'
  169. env: TEST_SUITE=cli
  170. services:
  171. - postgresql
  172. - redis-server
  173. install:
  174. - python setup.py install_egg_info
  175. - pip install -U -e .
  176. before_script:
  177. - psql -c 'create database sentry;' -U postgres
  178. - python: 2.7
  179. name: 'Distribution build'
  180. env: TEST_SUITE=dist
  181. before_install:
  182. - find "$NODE_DIR" -type d -empty -delete
  183. - nvm install
  184. - <<: *postgres_default
  185. name: 'Symbolicator Integration'
  186. env: TEST_SUITE=symbolicator
  187. before_install:
  188. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  189. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  190. - docker run -d --network host --name symbolicator us.gcr.io/sentryio/symbolicator:latest run
  191. - docker ps -a
  192. # snuba in testing
  193. - python: 2.7
  194. name: 'Snuba Integration'
  195. env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
  196. services:
  197. - docker
  198. - memcached
  199. - redis-server
  200. - postgresql
  201. before_install:
  202. - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
  203. - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
  204. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  205. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  206. - docker ps -a
  207. install:
  208. - python setup.py install_egg_info
  209. - pip install -U -e ".[dev,tests,optional]"
  210. - pip install confluent-kafka
  211. before_script:
  212. - psql -c 'create database sentry;' -U postgres
  213. # Deploy 'storybook' (component & style guide) - allowed to fail
  214. - language: node_js
  215. name: 'Storybook Deploy'
  216. env: STORYBOOK_BUILD=1
  217. before_install:
  218. # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
  219. - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
  220. # If the SDK is not already cached, download it and unpack it
  221. - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
  222. - tar -xzf credentials.tar.gz
  223. # Use the decrypted service account credentials to authenticate the command line tool
  224. - gcloud auth activate-service-account --key-file client-secret.json
  225. install:
  226. - ./bin/yarn install --pure-lockfile
  227. - gcloud version
  228. script: bash .travis/deploy-storybook.sh
  229. after_success: skip
  230. after_failure: skip
  231. # jobs are defined in matrix/include
  232. # to specify which jobs are allowed to fail, match the env exactly in matrix/allow_failures
  233. allow_failures:
  234. - language: node_js
  235. env: STORYBOOK_BUILD=1
  236. notifications:
  237. webhooks:
  238. urls:
  239. - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
  240. on_success: always
  241. on_failure: always
  242. on_start: always
  243. on_cancel: always
  244. on_error: always