Disable Travis online tests
.travis.yml | 72
@@ -1,82 +1,24 @@
sudo: false
language: python
conditions: v1
python: 3.6

# There are two accounts made specifically for Travis, and the passwords are really only encrypted for obscurity
# The global env variables `client1_email`, `client1_password`, `client2_email`, `client2_password` and `group_id`
# are set on the Travis Settings page

# The tests are run with `Limit concurrent jobs = 1`, since the tests can't use the clients simultaneously
cache: pip

before_install: pip install flit
# Use `--deps production` so that we don't install unnecessary dependencies
install: flit install --deps production --extras test

cache:
  pip: true
  # Pytest caching is disabled, since Travis CI instances have different public IPs. Facebook doesn't like that,
  # and redirects you to the URL `/checkpoint/block`, where you have to change the account's password
  # directories:
  # - .pytest_cache
script: pytest -m offline

jobs:
  include:
    # The tests are split into online and offline versions.
    # The online tests are only run against the master branch, because
    # Travis caching is per-branch and per-job: even though we cache the Facebook sessions via `.pytest_cache`
    # and in `tests.utils.load_client`, we need 6 new sessions per branch. This is usually the point where Facebook
    # starts complaining, and we have to fix it manually

    # Run online tests in all the supported python versions
    - &test-online
      if: (branch = master OR tag IS present) AND type != pull_request
      stage: online tests
      script: scripts/travis-online
    - <<: *test-online
      python: 2.7
    - python: 2.7
      before_install:
        - sudo apt-get -y install python3-pip python3-setuptools
        - sudo pip3 install flit
      install: flit install --python python --deps production --extras test
    - <<: *test-online
      python: 3.4
    - <<: *test-online
      python: 3.5
    - <<: *test-online
      python: pypy

    # Run the expensive tests, with the python version most likely to break, i.e. Python 2
    - <<: *test-online
      # Only run if the commit message includes [ci all] or [all ci]
      if: commit_message =~ /\[ci\s+all\]|\[all\s+ci\]/
      python: 2.7
      before_install:
        - sudo apt-get -y install python3-pip python3-setuptools
        - sudo pip3 install flit
      install: flit install --python python --deps production --extras test
      env: PYTEST_ADDOPTS='-m expensive'

    # Run offline tests in all the supported python versions
    - &test-offline
      # Ideally, it'd be nice to run the offline tests in every build, but since we can't run jobs concurrently (yet),
      # we'll disable them when they're not needed, and include them inside the online tests instead
      if: not ((branch = master OR tag IS present) AND type != pull_request)
      stage: offline tests
      script: scripts/travis-offline
    - <<: *test-offline
      python: 2.7
      before_install:
        - sudo apt-get -y install python3-pip python3-setuptools
        - sudo pip3 install flit
      install: flit install --python python --deps production --extras test
    - <<: *test-offline
      python: 3.4
    - <<: *test-offline
      python: 3.5
    - <<: *test-offline
      python: pypy
    - python: 3.4
    - python: 3.5
    - python: 3.6
    - python: pypy

    - stage: deploy
      name: PyPI
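The `script: pytest -m offline` entry and the `PYTEST_ADDOPTS='-m expensive'` variable both rely on pytest markers to split the suite into offline, online and expensive subsets. Below is a minimal Python sketch of how such markers could be registered and applied on the test side; the registration hook is standard pytest, but the file names and test bodies are illustrative assumptions, not taken from the repository.

```python
# conftest.py (sketch): register the custom markers so that `pytest -m offline`
# and `pytest -m expensive` can select subsets of the suite without warnings.
def pytest_configure(config):
    config.addinivalue_line("markers", "offline: tests that never contact Facebook's servers")
    config.addinivalue_line("markers", "expensive: slow online tests, only run on demand")


# test_example.py (sketch): hypothetical tests carrying those markers.
import pytest


@pytest.mark.offline
def test_formatting_helper():
    # A purely local check; `pytest -m offline` would pick this one up.
    assert "hello".upper() == "HELLO"


@pytest.mark.expensive
def test_full_thread_history():
    # Would need a logged-in client; only relevant when `-m expensive` is selected.
    pytest.skip("requires an online client")
```

With markers like these, `pytest -m offline` runs only the offline-marked tests, and setting `PYTEST_ADDOPTS='-m expensive'` makes a plain `pytest` invocation behave as if `-m expensive` had been passed on the command line.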
scripts/travis-offline
@@ -1,5 +0,0 @@
#!/bin/bash

set -ex

python -m pytest -m offline --color=yes
scripts/travis-online
@@ -1,18 +0,0 @@
#!/bin/bash

set -ex

if ! python -m pytest --color=yes; then
    echo << EOF
-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------

Some tests failed! Rerunning them, since they can be kinda flaky.

-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
EOF
    python -m pytest --last-failed --color=yes
fi
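The deleted script above captures a rerun-on-flaky-failure pattern: run the whole suite once, and if anything fails, rerun only the failures via pytest's `--last-failed` cache. A rough Python equivalent is sketched below; the function name and the in-process `pytest.main()` calls are assumptions for illustration, since the original used a plain shell script with separate `python -m pytest` invocations.

```python
# Sketch of the same retry logic using pytest's Python entry point instead of bash.
# Calling pytest.main() twice in one process is a simplification of the original,
# which ran two separate `python -m pytest` processes.
import sys

import pytest


def run_with_flaky_retry() -> int:
    # First pass over the full suite.
    exit_code = pytest.main(["--color=yes"])
    if exit_code == 0:
        return 0

    print("Some tests failed! Rerunning them, since they can be kinda flaky.")
    # Second pass: --last-failed reruns only the tests recorded as failing
    # in .pytest_cache during the first pass.
    return int(pytest.main(["--last-failed", "--color=yes"]))


if __name__ == "__main__":
    sys.exit(run_with_flaky_retry())
```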