# .travis.yml

dist: trusty
sudo: required
group: deprecated-2017Q4
language: python
python: 2.7

branches:
  only:
    - master
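# Everything cached below survives between builds: the Travis virtualenv (keyed by
# the exact Python version), the node install that nvm places under $NODE_DIR,
# node_modules (plus yarn's own cache via `yarn: true`), and the Google Cloud SDK
# used by the Storybook deploy job.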
cache:
  yarn: true
  directories:
    - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
    - "$NODE_DIR"
    - node_modules
    - "${HOME}/google-cloud-sdk"
addons:
  apt:
    update: true
    packages:
      - libxmlsec1-dev
      - libgeoip-dev
  chrome: stable

env:
  global:
    - NODE_ENV=development
    - PIP_DISABLE_PIP_VERSION_CHECK=on
    - PIP_QUIET=1
    - SENTRY_LIGHT_BUILD=1
    - SENTRY_SKIP_BACKEND_VALIDATION=1
    - SOUTH_TESTS_MIGRATE=1
    - DJANGO_VERSION=">=1.6.11,<1.7"
    # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
    - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
    - YARN_VERSION="1.13.0"
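# TEST_SUITE is set per job in matrix/include and selects the `make travis-test-*`
# and `make travis-scan-*` targets below. DJANGO_VERSION and SOUTH_TESTS_MIGRATE are
# overridden by individual jobs (the Django 1.8 and "No migrations" variants);
# presumably the Makefile and test setup read them to pick the Django release and to
# decide whether South runs migrations during tests.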
script:
  # certain commands require `sentry init` to be run, but this is only true
  # when running things within Travis
  - make travis-test-$TEST_SUITE
  - make travis-scan-$TEST_SUITE
  # installing dependencies for after_* steps here ensures they get cached
  # since those steps execute after travis runs `store build cache`

after_failure:
  - dmesg | tail -n 100

after_script:
  - |
    coverage_files=$(ls .artifacts/*coverage.xml || true)
    if [[ -n "$coverage_files" || -f .artifacts/coverage/cobertura-coverage.xml ]]; then
      pip install codecov
      codecov -e TEST_SUITE
    fi
  - npm install -g @zeus-ci/cli
  - zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  - zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  - zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  - zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  - zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  - zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  - zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json
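# `base_postgres` and `base_acceptance` below are not Travis keywords; they only
# define the YAML anchors (&postgres_default, &acceptance_default) that the matrix
# jobs pull in via the `<<:` merge key. Travis ignores unrecognized top-level keys,
# so these blocks never run as jobs on their own.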
base_postgres: &postgres_default
  python: 2.7
  services:
    - memcached
    - redis-server
    - postgresql
  install:
    - python setup.py install_egg_info
    - pip install -e ".[dev,tests,optional]"
  before_script:
    - psql -c 'create database sentry;' -U postgres
  before_install:
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
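# The ClickHouse and Snuba containers run with --network host, so Snuba reaches
# ClickHouse on localhost:9000 and (presumably) the test suite reaches Snuba on
# localhost as well; the same pair is repeated in the acceptance, Riak, symbolicator,
# and Snuba integration jobs below.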
base_acceptance: &acceptance_default
  python: 2.7
  services:
    - docker
    - memcached
    - redis-server
    - postgresql
  before_install:
    - find "$NODE_DIR" -type d -empty -delete
    - nvm install
    - npm install -g "yarn@${YARN_VERSION}"
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - yarn install --pure-lockfile
    - python setup.py install_egg_info
    - pip install -e ".[dev,tests,optional]"
    - wget -N "https://chromedriver.storage.googleapis.com/2.45/chromedriver_linux64.zip" -P ~/
    - unzip ~/chromedriver_linux64.zip -d ~/
    - rm ~/chromedriver_linux64.zip
    - sudo install -m755 ~/chromedriver /usr/local/bin/
  before_script:
    - psql -c 'create database sentry;' -U postgres
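# Acceptance jobs drive a real browser: the `chrome: stable` addon provides Chrome,
# and chromedriver 2.45 is unpacked into /usr/local/bin, presumably to pair with it.
# The `find "$NODE_DIR" -type d -empty -delete` step removes an empty cached node
# directory, presumably so `nvm install` reinstalls the version pinned in .nvmrc
# instead of trusting a stale cache entry.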
# each job in the matrix inherits `env/global` and uses everything above,
# but custom `services`, `before_install`, `install`, and `before_script` directives
# may be defined to set up individual job environments with more precision.
matrix:
  fast_finish: true
  include:
    # Lint python and javascript together
    - python: 2.7
      name: 'Linter'
      env: TEST_SUITE=lint
      install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - npm install -g "yarn@${YARN_VERSION}"
        - pip install -r requirements-dev.txt
        - yarn install --pure-lockfile

    - <<: *postgres_default
      name: 'Backend [Postgres] (1/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    - <<: *postgres_default
      name: 'Backend [Postgres] (2/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1
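    # TOTAL_TEST_GROUPS/TEST_GROUP split the Postgres suite across parallel jobs;
    # presumably the test runner behind `make travis-test-postgres` uses them to
    # decide which shard of the tests each job executes.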
    # django 1.8 compatibility
    - <<: *postgres_default
      name: 'Backend [Postgres] (Django 1.8) (1/2)'
      env: TEST_SUITE=postgres DB=postgres DJANGO_VERSION=">=1.8,<1.9" TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    - <<: *postgres_default
      name: 'Backend [Postgres] (Django 1.8) (2/2)'
      env: TEST_SUITE=postgres DB=postgres DJANGO_VERSION=">=1.8,<1.9" TOTAL_TEST_GROUPS=2 TEST_GROUP=1

    # django 1.8 compatibility without migrations
    - <<: *postgres_default
      name: 'Backend [Postgres] (Django 1.8, No migrations) (1/2)'
      env: TEST_SUITE=postgres DJANGO_VERSION=">=1.8,<1.9" SOUTH_TESTS_MIGRATE=0 TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    - <<: *postgres_default
      name: 'Backend [Postgres] (Django 1.8, No migrations) (2/2)'
      env: TEST_SUITE=postgres DJANGO_VERSION=">=1.8,<1.9" SOUTH_TESTS_MIGRATE=0 TOTAL_TEST_GROUPS=2 TEST_GROUP=1

    # XXX(markus): Remove after rust interfaces are done
    - <<: *postgres_default
      name: 'Backend [Postgres] (Rust Interface Renormalization)'
      env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1

    - python: 2.7
      name: 'Backend [Riak]'
      env: TEST_SUITE=riak DB=postgres
      services:
        - memcached
        - redis-server
        - postgresql
        - riak
      install:
        - python setup.py install_egg_info
        - pip install -e ".[dev,tests,optional]"
      before_script:
        - psql -c 'create database sentry;' -U postgres
      before_install:
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a

    - <<: *acceptance_default
      name: 'Acceptance'
      env: TEST_SUITE=acceptance USE_SNUBA=1

    # XXX(markus): Remove after rust interfaces are done
    - <<: *acceptance_default
      python: 2.7
      name: 'Acceptance (Rust Interface Renormalization)'
      env: TEST_SUITE=acceptance USE_SNUBA=1 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1 PERCY_ENABLE=0

    - python: 2.7
      name: 'Frontend'
      env: TEST_SUITE=js
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - npm install -g "yarn@${YARN_VERSION}"
      install:
        - yarn install --pure-lockfile

    - python: 2.7
      name: 'Command Line'
      env: TEST_SUITE=cli
      services:
        - postgresql
        - redis-server
      install:
        - python setup.py install_egg_info
        - pip install -e .
      before_script:
        - psql -c 'create database sentry;' -U postgres

    - python: 2.7
      name: 'Distribution build'
      env: TEST_SUITE=dist
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - npm install -g "yarn@${YARN_VERSION}"

    - <<: *postgres_default
      name: 'Symbolicator Integration'
      env: TEST_SUITE=symbolicator
      before_install:
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker run -d --network host --name symbolicator us.gcr.io/sentryio/symbolicator:latest run
        - docker ps -a

    # snuba in testing
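    # The Snuba suite needs Kafka and ZooKeeper in addition to ClickHouse and Snuba;
    # SENTRY_ZOOKEEPER_HOSTS and SENTRY_KAFKA_HOSTS point at the host-networked
    # containers started in before_install, and confluent-kafka is installed,
    # presumably for tests that talk to Kafka directly.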
    - python: 2.7
      name: 'Snuba Integration'
      env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
        - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
        - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -e ".[dev,tests,optional]"
        - pip install confluent-kafka
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # XXX(markus): Remove after rust interfaces are done
    - python: 2.7
      name: 'Snuba Integration (Rust Interface Renormalization)'
      env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
        - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
        - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -e ".[dev,tests,optional]"
        - pip install confluent-kafka
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # Deploy 'storybook' (component & style guide) - allowed to fail
    - language: node_js
      name: 'Storybook Deploy'
      env: STORYBOOK_BUILD=1
      before_install:
        # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
        - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
        # If the SDK is not already cached, download it and unpack it
        - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
        - tar -xzf credentials.tar.gz
        # Use the decrypted service account credentials to authenticate the command line tool
        - gcloud auth activate-service-account --key-file client-secret.json
        - npm install -g "yarn@${YARN_VERSION}"
      install:
        - yarn install --pure-lockfile
        - gcloud version
      script: bash .travis/deploy-storybook.sh
      after_success: skip
      after_failure: skip
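    # For the Storybook job above: $encrypted_020be61ef175_key/_iv are the variables
    # Travis defines when a file is encrypted with `travis encrypt-file`; the
    # decrypted tarball carries the service account key (client-secret.json) that
    # gcloud authenticates with, and the SDK download is skipped whenever the cached
    # ${HOME}/google-cloud-sdk directory is still present.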
  # jobs are defined in matrix/include
  # to specify which jobs are allowed to fail, match the env exactly in matrix/allow_failures
  allow_failures:
    - language: node_js
      env: STORYBOOK_BUILD=1
    # XXX(markus): Remove after rust interfaces are done
    - env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
    - env: TEST_SUITE=symbolicator

notifications:
  webhooks:
    urls:
      - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always