dist: trusty
sudo: required
group: deprecated-2017Q4
language: python
python: 2.7

branches:
  only:
    - master

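# cached across builds: the python virtualenv, the node toolchain used by nvm
# (NODE_DIR), node_modules, and the gcloud SDK downloaded by the storybook job below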
cache:
  yarn: true
  directories:
    - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
    - "$NODE_DIR"
    - node_modules
    - "${HOME}/google-cloud-sdk"

addons:
  apt:
    update: true
    packages:
      - libxmlsec1-dev
      - libgeoip-dev
  chrome: stable

env:
  global:
    - NODE_ENV=development
    - PIP_DISABLE_PIP_VERSION_CHECK=on
    - SENTRY_LIGHT_BUILD=1
    - SENTRY_SKIP_BACKEND_VALIDATION=1
    - SOUTH_TESTS_MIGRATE=1
    - DJANGO_VERSION=">=1.6.11,<1.7"
    # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
    - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
    - YARN_VERSION="1.3.2"

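# TEST_SUITE (and, where needed, DB) is set per job in matrix/include below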
script:
  - make travis-lint-$TEST_SUITE
  - make travis-test-$TEST_SUITE
  - make travis-scan-$TEST_SUITE
  # installing dependencies for after_* steps here ensures they get cached
  # since those steps execute after travis runs `store build cache`
  - pip install codecov
  - npm install -g @zeus-ci/cli

after_success:
  - codecov -e TEST_SUITE

after_failure:
  - dmesg | tail -n 100

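# upload test results, coverage, lint logs, and the webpack stats to Zeus for reporting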
after_script:
  - zeus upload -t "text/xml+xunit" .artifacts/*.junit.xml
    -t "text/xml+coverage" .artifacts/*.coverage.xml
    -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
    -t "text/html+pytest" .artifacts/*.pytest.html
    -t "text/plain+pycodestyle" .artifacts/*.pycodestyle.log
    -t "text/xml+checkstyle" .artifacts/*.checkstyle.xml
    -t "application/webpack-stats+json" .artifacts/webpack-stats.json

# each job in the matrix inherits `env/global` and uses everything above,
# but may define its own `services`, `before_install`, `install`, and `before_script`
# to set up its environment with more precision.
matrix:
  fast_finish: true
  include:
    # only the sqlite suite runs riak tests
    - python: 2.7
      env: TEST_SUITE=sqlite DB=sqlite
      services:
        - memcached
        - riak
        - redis-server
      install:
        - pip install -e ".[dev,tests,optional]"

    - python: 2.7
      env: TEST_SUITE=postgres DB=postgres
      services:
        - memcached
        - redis-server
        - postgresql
      install:
        - pip install -e ".[dev,tests,optional]"
      before_script:
        - psql -c 'create database sentry;' -U postgres

    - python: 2.7
      env: TEST_SUITE=mysql DB=mysql
      services:
        - memcached
        - mysql
        - redis-server
      install:
        - pip install -e ".[dev,tests,optional]"
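        # only this suite needs the MySQL driver, so it is installed here rather than via the extras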
        - pip install mysqlclient
      before_script:
        - mysql -u root -e 'create database sentry;'

    - python: 2.7
      env: TEST_SUITE=acceptance
      services:
        - memcached
        - redis-server
        - postgresql
      before_install:
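        # prune empty directories left behind by the cache before installing node and yarn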
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - npm install -g "yarn@${YARN_VERSION}"
      install:
        - yarn install --pure-lockfile
        - pip install -e ".[dev,tests,optional]"
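        # install the latest chromedriver so the acceptance tests can drive the Chrome provided by the addon above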
        - wget -N "http://chromedriver.storage.googleapis.com/$(curl https://chromedriver.storage.googleapis.com/LATEST_RELEASE)/chromedriver_linux64.zip" -P ~/
        - unzip ~/chromedriver_linux64.zip -d ~/
        - rm ~/chromedriver_linux64.zip
        - sudo install -m755 ~/chromedriver /usr/local/bin/
      before_script:
        - psql -c 'create database sentry;' -U postgres

    - python: 2.7
      env: TEST_SUITE=js
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - npm install -g "yarn@${YARN_VERSION}"
      install:
        - yarn install --pure-lockfile

    - python: 2.7
      env: TEST_SUITE=cli
      services:
        - postgresql
        - redis-server
      install:
        - pip install -e .
      before_script:
        - psql -c 'create database sentry;' -U postgres

    - python: 2.7
      env: TEST_SUITE=dist

    - python: 2.7
      env: SENTRY_TAGSTORE=sentry.tagstore.v2.V2TagStorage TEST_SUITE=postgres DB=postgres
      services:
        - memcached
        - redis-server
        - postgresql
      install:
        - pip install -e ".[dev,tests,optional]"
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # django 1.8 in testing - allowed to fail
    - python: 2.7
      env: TEST_SUITE=postgres DJANGO_VERSION=">=1.8,<1.9"
      services:
        - memcached
        - redis-server
        - postgresql
      install:
        - pip install -e ".[dev,tests,optional]"
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # snuba in testing - allowed to fail
    - python: 2.7
      env: TEST_SUITE=snuba SENTRY_TAGSTORE=sentry.tagstore.snuba.SnubaTagStorage
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
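        # run ClickHouse and Snuba in containers; Snuba (port 1218) backs SnubaTagStorage during the tests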
        - docker run -d --name clickhouse-server -p 9000:9000 -p 9009:9009 -p 8123:8123 --ulimit nofile=262144:262144 yandex/clickhouse-server
        - docker run -d --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=clickhouse-server:9000 --name snuba -p 1218:1218 --link clickhouse-server:clickhouse-server getsentry/snuba
        - docker ps -a
      install:
        - pip install -e ".[dev,tests,optional]"
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # Deploy 'storybook' (component & style guide) - allowed to fail
    - language: node_js
      env: STORYBOOK_BUILD=1
      before_install:
        # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
        - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
        # If the SDK is not already cached, download it and unpack it
        - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
        - tar -xzf credentials.tar.gz
        # Use the decrypted service account credentials to authenticate the command line tool
        - gcloud auth activate-service-account --key-file client-secret.json
        - npm install -g "yarn@${YARN_VERSION}"
      install:
        - yarn install --pure-lockfile
        - gcloud version
      script: bash .travis/deploy-storybook.sh
      after_success: skip
      after_failure: skip

  # jobs are defined in matrix/include
  # to specify which jobs are allowed to fail, match the env exactly in matrix/allow_failures
  allow_failures:
    - python: 2.7
      env: TEST_SUITE=postgres DJANGO_VERSION=">=1.8,<1.9"
    - python: 2.7
      env: TEST_SUITE=snuba SENTRY_TAGSTORE=sentry.tagstore.snuba.SnubaTagStorage
    - language: node_js
      env: STORYBOOK_BUILD=1

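# every build state change is reported to Zeus via its Travis webhook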
notifications:
  webhooks:
    urls:
      - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always