# .travis.yml — Travis CI configuration
  1. dist: trusty
  2. sudo: required
  3. group: deprecated-2017Q4
  4. language: python
  5. python: 2.7
  6. branches:
  7. only:
  8. - master
  9. cache:
  10. yarn: true
  11. directories:
  12. - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
  13. - "$NODE_DIR"
  14. - node_modules
  15. - "${HOME}/google-cloud-sdk"
  16. addons:
  17. apt:
  18. update: true
  19. packages:
  20. - libxmlsec1-dev
  21. - libgeoip-dev
  22. chrome: stable
  23. env:
  24. global:
  25. - NODE_ENV=development
  26. - PIP_DISABLE_PIP_VERSION_CHECK=on
  27. - PIP_QUIET=1
  28. - SENTRY_LIGHT_BUILD=1
  29. - SENTRY_SKIP_BACKEND_VALIDATION=1
  30. - SOUTH_TESTS_MIGRATE=1
  31. - DJANGO_VERSION=">=1.6.11,<1.7"
  32. # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
  33. - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
  34. - YARN_VERSION="1.13.0"
  35. script:
  36. - make travis-lint-$TEST_SUITE
  37. - make travis-test-$TEST_SUITE
  38. - make travis-scan-$TEST_SUITE
  39. # installing dependencies for after_* steps here ensures they get cached
  40. # since those steps execute after travis runs `store build cache`
  41. - pip install codecov
  42. - npm install -g @zeus-ci/cli
  43. after_success:
  44. - codecov -e TEST_SUITE
  45. after_failure:
  46. - dmesg | tail -n 100
  47. after_script:
  48. - zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  49. - zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  50. - zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  51. - zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  52. - zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  53. - zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  54. - zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json
  55. # each job in the matrix inherits `env/global` and uses everything above,
  56. # but custom `services`, `before_install`, `install`, and `before_script` directives
  57. # may be defined to define and setup individual job environments with more precision.
  58. matrix:
  59. fast_finish: true
  60. include:
  61. # only the sqlite suite runs riak tests
  62. - python: 2.7
  63. env: TEST_SUITE=sqlite DB=sqlite
  64. services:
  65. - memcached
  66. - riak
  67. - redis-server
  68. install:
  69. - python setup.py install_egg_info
  70. - pip install -e ".[dev,tests,optional]"
  71. - python: 2.7
  72. env: TEST_SUITE=postgres DB=postgres
  73. services:
  74. - memcached
  75. - redis-server
  76. - postgresql
  77. install:
  78. - python setup.py install_egg_info
  79. - pip install -e ".[dev,tests,optional]"
  80. before_script:
  81. - psql -c 'create database sentry;' -U postgres
  82. - python: 2.7
  83. env: TEST_SUITE=mysql DB=mysql
  84. services:
  85. - memcached
  86. - mysql
  87. - redis-server
  88. install:
  89. - python setup.py install_egg_info
  90. - pip install -e ".[dev,tests,optional]"
  91. # 1.3.14 causes test failures. Pinning to 1.3.13 for now, hopefully
  92. # a later release resolves this.
  93. - pip install mysqlclient==1.3.13
  94. before_script:
  95. - mysql -u root -e 'create database sentry;'
  96. - python: 2.7
  97. env: TEST_SUITE=acceptance
  98. services:
  99. - memcached
  100. - redis-server
  101. - postgresql
  102. before_install:
  103. - find "$NODE_DIR" -type d -empty -delete
  104. - nvm install
  105. - npm install -g "yarn@${YARN_VERSION}"
  106. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  107. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  108. - docker ps -a
  109. install:
  110. - yarn install --pure-lockfile
  111. - python setup.py install_egg_info
  112. - pip install -e ".[dev,tests,optional]"
  113. - wget -N "https://chromedriver.storage.googleapis.com/2.45/chromedriver_linux64.zip" -P ~/
  114. - unzip ~/chromedriver_linux64.zip -d ~/
  115. - rm ~/chromedriver_linux64.zip
  116. - sudo install -m755 ~/chromedriver /usr/local/bin/
  117. before_script:
  118. - psql -c 'create database sentry;' -U postgres
  119. - python: 2.7
  120. env: TEST_SUITE=js
  121. before_install:
  122. - find "$NODE_DIR" -type d -empty -delete
  123. - nvm install
  124. - npm install -g "yarn@${YARN_VERSION}"
  125. install:
  126. - yarn install --pure-lockfile
  127. - python: 2.7
  128. env: TEST_SUITE=cli
  129. services:
  130. - postgresql
  131. - redis-server
  132. install:
  133. - python setup.py install_egg_info
  134. - pip install -e .
  135. before_script:
  136. - psql -c 'create database sentry;' -U postgres
  137. - python: 2.7
  138. env: TEST_SUITE=dist
  139. before_install:
  140. - find "$NODE_DIR" -type d -empty -delete
  141. - nvm install
  142. - npm install -g "yarn@${YARN_VERSION}"
  143. # django 1.8 compatibility
  144. - python: 2.7
  145. env: TEST_SUITE=postgres DJANGO_VERSION=">=1.8,<1.9"
  146. services:
  147. - memcached
  148. - redis-server
  149. - postgresql
  150. install:
  151. - python setup.py install_egg_info
  152. - pip install -e ".[dev,tests,optional]"
  153. before_script:
  154. - psql -c 'create database sentry;' -U postgres
  155. # django 1.8 compatibility with migrations
  156. - python: 2.7
  157. env: TEST_SUITE=postgres DJANGO_VERSION=">=1.8,<1.9" SOUTH_TESTS_MIGRATE=0
  158. services:
  159. - memcached
  160. - redis-server
  161. - postgresql
  162. install:
  163. - python setup.py install_egg_info
  164. - pip install -e ".[dev,tests,optional]"
  165. before_script:
  166. - psql -c 'create database sentry;' -U postgres
  167. # snuba in testing
  168. - python: 2.7
  169. env: TEST_SUITE=snuba SENTRY_TAGSTORE=sentry.tagstore.snuba.SnubaTagStorage SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
  170. services:
  171. - docker
  172. - memcached
  173. - redis-server
  174. - postgresql
  175. before_install:
  176. - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
  177. - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
  178. - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
  179. - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
  180. - docker ps -a
  181. install:
  182. - python setup.py install_egg_info
  183. - pip install -e ".[dev,tests,optional]"
  184. - pip install confluent-kafka
  185. before_script:
  186. - psql -c 'create database sentry;' -U postgres
  187. # Deploy 'storybook' (component & style guide) - allowed to fail
  188. - language: node_js
  189. env: STORYBOOK_BUILD=1
  190. before_install:
  191. # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
  192. - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
  193. # If the SDK is not already cached, download it and unpack it
  194. - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
  195. - tar -xzf credentials.tar.gz
  196. # Use the decrypted service account credentials to authenticate the command line tool
  197. - gcloud auth activate-service-account --key-file client-secret.json
  198. - npm install -g "yarn@${YARN_VERSION}"
  199. install:
  200. - yarn install --pure-lockfile
  201. - gcloud version
  202. script: bash .travis/deploy-storybook.sh
  203. after_success: skip
  204. after_failure: skip
  205. # jobs are defined in matrix/include
  206. # to specify which jobs are allowed to fail, match the env exactly in matrix/allow_failures
  207. allow_failures:
  208. - language: node_js
  209. env: STORYBOOK_BUILD=1
  210. notifications:
  211. webhooks:
  212. urls:
  213. - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
  214. on_success: always
  215. on_failure: always
  216. on_start: always
  217. on_cancel: always
  218. on_error: always