1version: 2.1
2
3default_resource_class: &default_resource_class medium
4cimg_base_image: &cimg_base_image cimg/base:stable
5python39_image: &python39_image cimg/python:3.9
6ddtrace_dev_image: &ddtrace_dev_image datadog/dd-trace-py:buster
7datadog_agent_image: &datadog_agent_image datadog/agent:latest
8redis_image: &redis_image redis:4.0-alpine
9memcached_image: &memcached_image memcached:1.5-alpine
10cassandra_image: &cassandra_image cassandra:3.11.7
11consul_image: &consul_image consul:1.6.0
12moto_image: &moto_image palazzem/moto:1.0.1
13mysql_image: &mysql_image mysql:5.7
14postgres_image: &postgres_image postgres:10.5-alpine
15mongo_image: &mongo_image mongo:3.6
16httpbin_image: &httpbin_image kennethreitz/httpbin@sha256:2c7abc4803080c22928265744410173b6fea3b898872c01c5fd0f0f9df4a59fb
17vertica_image: &vertica_image sumitchawla/vertica:latest
18rabbitmq_image: &rabbitmq_image rabbitmq:3.7-alpine
19
20orbs:
21  win: circleci/windows@2.2.0
22
23machine_executor: &machine_executor
24  machine:
25    image: ubuntu-2004:202107-02
26  environment:
27    - BOTO_CONFIG: /dev/null
28    # https://support.circleci.com/hc/en-us/articles/360045268074-Build-Fails-with-Too-long-with-no-output-exceeded-10m0s-context-deadline-exceeded-
29    - PYTHONUNBUFFERED: 1
30  steps:
31    - &pyenv-set-global
32      run:
33        name: Set global pyenv
34        command: |
35          pyenv global 3.9.4
36
37contrib_job: &contrib_job
38  executor: ddtrace_dev
39  parallelism: 4
40
41contrib_job_small: &contrib_job_small
42  executor: ddtrace_dev_small
43  parallelism: 1
44
45commands:
46  save_coverage:
47    description: "Save coverage.py results to workspace"
48    steps:
49      - run: |
50          set -ex
51          mkdir coverage
52          if [ -f .coverage ];
53          then
54            cp .coverage ./coverage/$CIRCLE_BUILD_NUM-$CIRCLE_JOB-$CIRCLE_NODE_INDEX.coverage
55          fi
56      - persist_to_workspace:
57          root: coverage
58          paths:
59            - "*.coverage"
60      - store_artifacts:
61          path: coverage
62
63  setup_tox:
64    description: "Install tox"
65    steps:
66      - run: pip install -U tox
67
68  setup_riot:
69    description: "Install riot"
70    steps:
71      # Make sure we install and run riot on Python 3
72      - run: pip3 install riot
73
74  restore_tox_cache:
75    description: "Restore .tox directory from previous runs for faster installs"
76    steps:
77      - restore_cache:
78          # In the cache key:
79          #   - .Environment.CIRCLE_JOB: We do separate tox environments by job name, so caching and restoring is
80          #                              much faster.
81          key: tox-cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "tox.ini" }}-{{ checksum "setup.py" }}
82
83  save_tox_cache:
84    description: "Save .tox directory into cache for faster installs next time"
85    steps:
86      - save_cache:
87          # In the cache key:
88          #   - .Environment.CIRCLE_JOB: We do separate tox environments by job name, so caching and restoring is
89          #                              much faster.
90          key: tox-cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "tox.ini" }}-{{ checksum "setup.py" }}
91          paths:
92            - ".tox"
93
94  run_test:
95    description: "Run tests matching a pattern"
96    parameters:
97      pattern:
98        type: string
99        default: ""
100      wait:
101        type: string
102        default: ""
103      snapshot:
104        type: boolean
105        default: false
106      docker_services:
107        type: string
108        default: ""
109      store_coverage:
110        type: boolean
111        default: true
112    steps:
113      - attach_workspace:
114          at: .
115      - checkout
116      - when:
117          condition:
118              << parameters.snapshot >>
119          steps:
120            - run: SNAPSHOT_CI=1 docker-compose up -d testagent << parameters.docker_services >>
121            - run:
122                command: docker-compose logs -f
123                background: true
124            - run:
125                environment:
126                  DD_TRACE_AGENT_URL: http://localhost:9126
127
128                command: |
129                  mv .riot .ddriot
130                  echo -p2.7,-p3.5,-p3.6,-p3.7,-p3.8,-p3.9 | tr ',' '\n' | circleci tests split | xargs ./scripts/ddtest riot -v run --exitfirst --pass-env -s '<< parameters.pattern >>'
131      - unless:
132          condition:
133              << parameters.snapshot >>
134          steps:
135            - when:
136                condition:
137                  << parameters.wait >>
138                steps:
139                  - setup_tox
140                  - run:
141                      name: "Waiting for << parameters.wait >>"
142                      command: tox -e 'wait' << parameters.wait >>
143            - setup_riot
144            - run:
145                command: "echo -p2.7,-p3.5,-p3.6,-p3.7,-p3.8,-p3.9 | tr ',' '\n' | circleci tests split | xargs riot -v run --exitfirst --pass-env -s '<< parameters.pattern >>'"
146      - when:
147          condition:
148            << parameters.store_coverage >>
149          steps:
150            - save_coverage
151      - store_test_results:
152          path: test-results
153      - store_artifacts:
154          path: test-results
155
156  run_tox_scenario_with_testagent:
157    description: Run scripts/run-tox-scenario with setup, caching persistence and the testagent
158    parameters:
159      pattern:
160        type: string
161      wait:
162        type: string
163        default: ""
164    steps:
165      - checkout
166      - restore_tox_cache
167      - when:
168          condition:
169            << parameters.wait >>
170          steps:
171            - run:
172                name: "Waiting for << parameters.wait >>"
173                command: tox -e 'wait' << parameters.wait >>
174      - run: SNAPSHOT_CI=1 docker-compose up -d memcached redis testagent
175      - run:
176          command: docker-compose logs -f
177          background: true
178      - run:
179          name: "Run scripts/run-tox-scenario"
180          environment:
181            DD_TRACE_AGENT_URL: http://localhost:9126
182          command: ./scripts/ddtest scripts/run-tox-scenario '<< parameters.pattern >>'
183      - save_tox_cache
184
185  run_tox_scenario:
186    description: "Run scripts/run-tox-scenario with setup, caching and persistence"
187    parameters:
188      pattern:
189        type: string
190      wait:
191        type: string
192        default: ""
193      store_coverage:
194        type: boolean
195        default: true
196    steps:
197      - checkout
198      - setup_tox
199      - restore_tox_cache
200      - when:
201          condition:
202            << parameters.wait >>
203          steps:
204            - run:
205                name: "Waiting for << parameters.wait >>"
206                command: tox -e 'wait' << parameters.wait >>
207      - run:
208          name: "Run scripts/run-tox-scenario"
209          command: scripts/run-tox-scenario '<< parameters.pattern >>'
210      - save_tox_cache
211      - when:
212          condition:
213            << parameters.store_coverage >>
214          steps:
215            - save_coverage
216      - store_test_results:
217          path: test-results
218      - store_artifacts:
219          path: test-results
220
221executors:
222  cimg_base:
223    docker:
224      - image: *cimg_base_image
225    resource_class: small
226  python39:
227    docker:
228      - image: *python39_image
229    resource_class: small
230  ddtrace_dev:
231    docker:
232      - image: *ddtrace_dev_image
233    resource_class: *default_resource_class
234  ddtrace_dev_small:
235    docker:
236      - image: *ddtrace_dev_image
237    resource_class: small
238
239# Common configuration blocks as YAML anchors
240# See: https://circleci.com/blog/circleci-hacks-reuse-yaml-in-your-circleci-config-with-yaml/
241httpbin_local: &httpbin_local
242  image: *httpbin_image
243  name: httpbin.org
244
245datadog_agent: &datadog_agent
246  image: *datadog_agent_image
247  environment:
248    DD_API_KEY: invalid_key_but_this_is_fine
249    DD_APM_ENABLED: true
250    DD_APM_RECEIVER_SOCKET: /tmp/ddagent/trace.sock
251    DD_BIND_HOST: 0.0.0.0
252    DD_LOG_LEVEL: DEBUG
253
254mysql_server: &mysql_server
255  image: *mysql_image
256  environment:
257    - MYSQL_ROOT_PASSWORD=admin
258    - MYSQL_PASSWORD=test
259    - MYSQL_USER=test
260    - MYSQL_DATABASE=test
261
262postgres_server: &postgres_server
263  image: *postgres_image
264  environment:
265    - POSTGRES_PASSWORD=postgres
266    - POSTGRES_USER=postgres
267    - POSTGRES_DB=postgres
268
269jobs:
270  pre_check:
271    executor: python39
272    steps:
273      - checkout
274      - setup_riot
275      - run:
276          name: "Black check"
277          command: riot run -s black --check .
278      - run:
279          name: "Flake8 check"
280          command: riot run -s flake8
281      - run:
282          name: "Mypy check"
283          command: riot run -s mypy
284      - run:
285          name: "Codespell check"
286          command: riot run -s codespell
287
288  ccheck:
289    executor: cimg_base
290    steps:
291      - checkout
292      - run: sudo apt-get update
293      - run: sudo apt-get install --yes clang-format gcc-10 python3 python3-setuptools python3-pip
294      - run: scripts/cformat.sh
295      - run: DD_COMPILE_DEBUG=1 CC=gcc-10 pip -vvv install .
296
297  coverage_report:
298    executor: python39
299    steps:
300      - checkout
301      - attach_workspace:
302          at: .
303      - run: pip install coverage codecov diff_cover
304      - run: ls -hal *.coverage
305      # Combine all job coverage reports into one
306      - run: coverage combine *.coverage
307      # Upload coverage report to Codecov
308      # DEV: Do not use the bash uploader, it cannot be trusted
309      - run: codecov
310      # Generate and save xml report
311      # DEV: "--ignore-errors" to skip over files that are missing
312      - run: coverage xml --ignore-errors
313      - store_artifacts:
314          path: coverage.xml
315      # Generate and save JSON report
316      # DEV: "--ignore-errors" to skip over files that are missing
317      - run: coverage json --ignore-errors
318      - store_artifacts:
319          path: coverage.json
320      # Generate and save HTML report
321      # DEV: "--ignore-errors" to skip over files that are missing
322      - run: coverage html --ignore-errors
323      - store_artifacts:
324          path: htmlcov
325      # Print ddtrace/ report to stdout
326      # DEV: "--ignore-errors" to skip over files that are missing
327      - run: coverage report --ignore-errors --omit=tests/
328      # Print tests/ report to stdout
329      # DEV: "--ignore-errors" to skip over files that are missing
330      - run: coverage report --ignore-errors --omit=ddtrace/
331      # Print diff-cover report to stdout (compares against origin/master)
332      - run: diff-cover --compare-branch origin/master coverage.xml
333
334  build_base_venvs:
335    executor: ddtrace_dev
336    parallelism: 6
337    steps:
338      - checkout
339      - setup_riot
340      - run:
341          name: "Run riotfile.py tests"
342          command: riot run -s riot-helpers
343      - run:
344          name: "Generate base virtual environments."
345          command: "echo '2.7,3.5,3.6,3.7,3.8,3.9' | tr ',' '\n' | circleci tests split | xargs -I PY riot -v generate --python=PY"
346      - persist_to_workspace:
347          root: .
348          paths:
349            - "."
350
351  tracer:
352    <<: *contrib_job
353    steps:
354      - run_test:
355          pattern: "tracer"
356
357  opentracer:
358    <<: *contrib_job
359    steps:
360      - run_tox_scenario:
361          pattern: '^py..-opentracer'
362
363  profile:
364    <<: *contrib_job
365    parallelism: 6
366    steps:
367      - run_tox_scenario:
368          store_coverage: false
369          pattern: '^py.\+-profile'
370
371  integration_agent5:
372    <<: *machine_executor
373    steps:
374      - checkout
375      - run: docker-compose up -d ddagent5
376      - run:
377          command: docker-compose logs -f
378          background: true
379      - run:
380          command: ./scripts/ddtest scripts/run-tox-scenario '^py..-integration-v5'
381
382  integration_agent:
383    <<: *machine_executor
384    steps:
385      - checkout
386      - run: docker-compose up -d ddagent
387      - run:
388          command: docker-compose logs -f
389          background: true
390      - run:
391          command: ./scripts/ddtest scripts/run-tox-scenario '^py..-integration-latest'
392
393  integration_testagent:
394    <<: *machine_executor
395    steps:
396      - checkout
397      - run: SNAPSHOT_CI=1 docker-compose up -d testagent
398      - run:
399          command: docker-compose logs -f
400          background: true
401      - run:
402          environment:
403            DD_TRACE_AGENT_URL: http://localhost:9126
404          command: ./scripts/ddtest scripts/run-tox-scenario '^py..-integration-snapshot'
405
406  vendor:
407    <<: *contrib_job_small
408    docker:
409      - image: *ddtrace_dev_image
410    steps:
411      - run_test:
412          pattern: 'vendor'
413
414  futures:
415    <<: *contrib_job_small
416    steps:
417      - run_tox_scenario:
418          pattern: '^futures_contrib-'
419
420  boto:
421    <<: *machine_executor
422    parallelism: 4
423    steps:
424      - run_test:
425          pattern: '^boto'  # run boto and botocore
426          snapshot: true
427          docker_services: "localstack"
428
429  ddtracerun:
430    <<: *contrib_job
431    docker:
432      - image: *ddtrace_dev_image
433      - image: *redis_image
434    steps:
435      - run_test:
436          store_coverage: false
437          pattern: 'ddtracerun'
438
439  test_logging:
440    <<: *contrib_job_small
441    steps:
442      - run_test:
443          pattern: 'test_logging'
444
445  asyncio:
446    <<: *contrib_job_small
447    steps:
448      - run_tox_scenario:
449          pattern: '^asyncio_contrib-'
450
451  pylons:
452    <<: *contrib_job
453    steps:
454      - run_tox_scenario:
455          pattern: '^pylons_contrib-'
456
457  aiohttp:
458    <<: *contrib_job
459    steps:
460      - run_test:
461          pattern: 'aiohttp'
462
463  asgi:
464    <<: *contrib_job_small
465    steps:
466      - run_test:
467          pattern: 'asgi$'
468
469  tornado:
470    <<: *contrib_job
471    steps:
472      - run_tox_scenario:
473          pattern: '^tornado_contrib-'
474
475  bottle:
476    <<: *contrib_job
477    steps:
478      - run_tox_scenario:
479          pattern: '^bottle_contrib\(_autopatch\)\?-'
480
481  cassandra:
482    <<: *contrib_job
483    docker:
484      - image: *ddtrace_dev_image
485        environment:
486          CASS_DRIVER_NO_EXTENSIONS: 1
487      - image: *cassandra_image
488        environment:
489          - MAX_HEAP_SIZE=512M
490          - HEAP_NEWSIZE=256M
491    steps:
492      - run_test:
493          wait: cassandra
494          pattern: 'cassandra'
495
496  celery:
497    <<: *contrib_job
498    parallelism: 6
499    docker:
500      - image: *ddtrace_dev_image
501      - image: redis:4.0-alpine
502      - image: *rabbitmq_image
503    steps:
504      - run_test:
505          pattern: 'celery'
506
507  cherrypy:
508    <<: *machine_executor
509    parallelism: 6
510    steps:
511      - run_test:
512          pattern: 'cherrypy'
513          snapshot: true
514
515  consul:
516    <<: *contrib_job
517    docker:
518      - image: *ddtrace_dev_image
519      - image: *consul_image
520    steps:
521      - run_tox_scenario:
522          pattern: '^consul_contrib-'
523
524  dogpile_cache:
525    <<: *contrib_job
526    steps:
527      - run_tox_scenario:
528          pattern: '^dogpile_contrib-'
529
530  elasticsearch:
531    <<: *machine_executor
532    parallelism: 4
533    steps:
534      - run_test:
535          pattern: 'elasticsearch'
536          snapshot: true
537          docker_services: 'elasticsearch'
538
539  falcon:
540    <<: *contrib_job
541    steps:
542      - run_test:
543          pattern: 'falcon'
544
545  django:
546    <<: *machine_executor
547    parallelism: 6
548    steps:
549      - run_test:
550          pattern: 'django$'
551          snapshot: true
552          docker_services: "memcached redis postgres"
553
554  django_hosts:
555    <<: *machine_executor
556    steps:
557      - run_test:
558          pattern: 'django_hosts$'
559          snapshot: true
560
561  djangorestframework:
562    <<: *machine_executor
563    parallelism: 6
564    steps:
565      - run_test:
566          pattern: 'djangorestframework'
567          snapshot: true
568          docker_services: "memcached redis"
569
570  fastapi:
571    <<: *machine_executor
572    steps:
573      - run_test:
574          pattern: "fastapi"
575          snapshot: true
576
577  flask:
578    <<: *contrib_job
579    docker:
580      - image: *ddtrace_dev_image
581      - image: *redis_image
582      - image: *memcached_image
583    steps:
584      - run_test:
585          # Run both flask and flask_cache test suites
586          pattern: 'flask(_cache)?'
587
588  gevent:
589    <<: *contrib_job
590    steps:
591      - run_tox_scenario:
592          pattern: '^gevent_contrib-'
593
594  grpc:
595    <<: *machine_executor
596    parallelism: 6
597    steps:
598      - run_test:
599          pattern: "grpc"
600          snapshot: true
601
602  httplib:
603    <<: *contrib_job
604    steps:
605      - run_tox_scenario:
606          pattern: '^httplib_contrib'
607
608  httpx:
609    <<: *machine_executor
610    steps:
611      - run_test:
612          pattern: 'httpx'
613          snapshot: true
614          docker_services: 'httpbin_local'
615
616  mariadb:
617    <<: *machine_executor
618    steps:
619      - run_test:
620          pattern: 'mariadb$'
621          snapshot: true
622          docker_services: "mariadb"
623
624  molten:
625    <<: *contrib_job
626    steps:
627      - run_tox_scenario:
628          pattern: '^molten_contrib-'
629
630  mysqlconnector:
631    <<: *contrib_job
632    docker:
633      - image: *ddtrace_dev_image
634      - *mysql_server
635    steps:
636      - run_test:
637          wait: mysql
638          pattern: 'mysql'
639
640  mysqlpython:
641    <<: *contrib_job
642    docker:
643      - image: *ddtrace_dev_image
644      - *mysql_server
645    steps:
646      - run_tox_scenario:
647          wait: mysql
648          pattern: '^mysqldb_contrib-.*-mysqlclient'
649
650  pymysql:
651    <<: *contrib_job
652    docker:
653      - image: *ddtrace_dev_image
654      - *mysql_server
655    steps:
656      - run_tox_scenario:
657          wait: mysql
658          pattern: '^pymysql_contrib-'
659
660  pylibmc:
661    <<: *contrib_job
662    docker:
663      - image: *ddtrace_dev_image
664      - image: *memcached_image
665    steps:
666      - run_tox_scenario:
667          pattern: '^pylibmc_contrib-'
668
669  pytest:
670    executor: ddtrace_dev
671    steps:
672      - run_test:
673          pattern: 'pytest'
674
675  pymemcache:
676    <<: *contrib_job
677    docker:
678      - image: *ddtrace_dev_image
679      - image: *memcached_image
680    steps:
681      - run_test:
682          pattern: "pymemcache"
683
684  mongoengine:
685    <<: *machine_executor
686    parallelism: 1
687    steps:
688      - run_test:
689          pattern: 'mongoengine'
690          snapshot: true
691          docker_services: 'mongo'
692
693  pymongo:
694    <<: *contrib_job
695    docker:
696      - image: *ddtrace_dev_image
697      - image: *mongo_image
698    steps:
699      - run_test:
700          pattern: "pymongo"
701
702  pynamodb:
703    <<: *contrib_job
704    steps:
705      - run_test:
706          pattern: "pynamodb"
707
708  pyodbc:
709    <<: *contrib_job
710    docker:
711      - image: *ddtrace_dev_image
712    steps:
713      - run_tox_scenario:
714          pattern: '^pyodbc_contrib-'
715
716  pyramid:
717    <<: *machine_executor
718    steps:
719      - run_test:
720          pattern: 'pyramid'
721          snapshot: true
722
723  requests:
724    <<: *contrib_job
725    docker:
726      - image: *ddtrace_dev_image
727      - *httpbin_local
728    steps:
729      - run_test:
730          pattern: "requests"
731
732  requestsgevent:
733    <<: *contrib_job
734    steps:
735      - run_tox_scenario:
736          pattern: '^requests_gevent_contrib-'
737
738  sanic:
739    <<: *contrib_job
740    steps:
741      - run_tox_scenario:
742          pattern: '^sanic_contrib-'
743
744  snowflake:
745    <<: *machine_executor
746    parallelism: 4
747    steps:
748      - run_test:
749          pattern: "snowflake"
750          snapshot: true
751
752  starlette:
753    <<: *machine_executor
754    steps:
755      - run_test:
756          pattern: "starlette"
757          snapshot: true
758
759  sqlalchemy:
760    <<: *contrib_job
761    docker:
762      - image: *ddtrace_dev_image
763      - *postgres_server
764      - *mysql_server
765    steps:
766      - run_test:
767          wait: postgres mysql
768          pattern: "sqlalchemy"
769
770  dbapi:
771    <<: *contrib_job
772    steps:
773      - run_tox_scenario:
774          pattern: '^dbapi_contrib-'
775
776  psycopg:
777    <<: *machine_executor
778    parallelism: 4
779    steps:
780      - run_test:
781          pattern: "psycopg"
782          snapshot: true
783          docker_services: "postgres"
784
785  aiobotocore:
786    <<: *contrib_job
787    docker:
788      - image: *ddtrace_dev_image
789      - image: *moto_image
790    steps:
791       - run_test:
792          pattern: 'aiobotocore'
793
794  aiopg:
795    <<: *contrib_job
796    docker:
797      - image: *ddtrace_dev_image
798      - *postgres_server
799    steps:
800      - run_test:
801          wait: postgres
802          pattern: 'aiopg'
803
804  aredis:
805    <<: *machine_executor
806    parallelism: 4
807    steps:
808      - run_test:
809          docker_services: 'redis'
810          pattern: 'aredis$'
811          snapshot: true
812
813  yaaredis:
814    <<: *machine_executor
815    parallelism: 4
816    steps:
817      - run_test:
818          docker_services: 'redis'
819          pattern: 'yaaredis$'
820          snapshot: true
821
822  redis:
823    <<: *machine_executor
824    parallelism: 4
825    steps:
826      - run_test:
827          docker_services: 'redis'
828          pattern: 'redis$'
829          snapshot: true
830
831  rediscluster:
832    <<: *machine_executor
833    steps:
834      - run_test:
835          pattern: 'rediscluster'
836          docker_services: 'rediscluster'
837          snapshot: true
838
839  rq:
840    <<: *machine_executor
841    parallelism: 4
842    steps:
843      - run_test:
844          pattern: "rq"
845          snapshot: true
846          docker_services: "redis"
847
848  urllib3:
849    <<: *machine_executor
850    steps:
851      - run_test:
852          pattern: 'urllib3'
853          snapshot: true
854          docker_services: "httpbin_local"
855
856  vertica:
857    <<: *contrib_job
858    docker:
859      - image: *ddtrace_dev_image
860      - image: *vertica_image
861        environment:
862          - VP_TEST_USER=dbadmin
863          - VP_TEST_PASSWORD=abc123
864          - VP_TEST_DATABASE=docker
865    steps:
866      - run_tox_scenario:
867          wait: vertica
868          pattern: '^vertica_contrib-'
869
870  wsgi:
871    <<: *machine_executor
872    steps:
873      - run_test:
874          pattern: "wsgi"
875          snapshot: true
876
877  kombu:
878    <<: *contrib_job
879    docker:
880      - image: *ddtrace_dev_image
881      - image: *rabbitmq_image
882    steps:
883      - run_tox_scenario:
884          wait: rabbitmq
885          pattern: '^kombu_contrib-'
886
887  sqlite3:
888    <<: *contrib_job
889    steps:
890      - run_tox_scenario:
891          pattern: '^sqlite3_contrib-'
892
893  benchmarks:
894    <<: *contrib_job
895    steps:
896      - run_test:
897          store_coverage: false
898          pattern: '^benchmarks'
899
900  jinja2:
901    <<: *contrib_job
902    steps:
903      - run_test:
904          pattern: 'jinja2'
905
906  mako:
907    <<: *contrib_job_small
908    steps:
909      - run_test:
910          pattern: 'mako'
911
912  algoliasearch:
913    <<: *contrib_job
914    steps:
915      - run_tox_scenario:
916          pattern: '^algoliasearch_contrib-'
917
918  build_docs:
919    # build documentation and store as an artifact
920    executor: ddtrace_dev
921    steps:
922      - setup_riot
923      - checkout
924      - run:
925          command: |
926             riot -v run docs
927             mkdir -p /tmp/docs
928             cp -r docs/_build/html/* /tmp/docs
929      - store_artifacts:
930          path: /tmp/docs
931
932requires_pre_check: &requires_pre_check
933  requires:
934    - pre_check
935    - ccheck
936
937requires_base_venvs: &requires_base_venvs
938  requires:
939    - pre_check
940    - ccheck
941    - build_base_venvs
942
943requires_tests: &requires_tests
944  requires:
945    - aiobotocore
946    - aiohttp
947    - aiopg
948    - asyncio
949    - algoliasearch
950    - asgi
951    - benchmarks
952    - boto
953    - bottle
954    - cassandra
955    - celery
956    - cherrypy
957    - consul
958    - dbapi
959    - ddtracerun
960    - dogpile_cache
961    - django
962    - django_hosts
963    - djangorestframework
964    - elasticsearch
965    - falcon
966    - fastapi
967    - flask
968    - futures
969    - gevent
970    - grpc
971    - httplib
972    - httpx
973    - integration_agent5
974    - integration_agent
975    - integration_testagent
976    - vendor
977    - profile
978    - jinja2
979    - kombu
980    - mako
981    - mariadb
982    - molten
983    - mongoengine
984    - mysqlconnector
985    - mysqlpython
986    - opentracer
987    - psycopg
988    - pylibmc
989    - pylons
990    - pymemcache
991    - pymongo
992    - pymysql
993    - pynamodb
994    - pyodbc
995    - pyramid
996    - pytest
997    - aredis
998    - yaaredis
999    - redis
1000    - rediscluster
1001    - requests
1002    - rq
1003    - sanic
1004    - snowflake
1005    - sqlalchemy
1006    - sqlite3
1007    - starlette
1008    - test_logging
1009    - tracer
1010    - tornado
1011    - urllib3
1012    - vertica
1013    - wsgi
1014
1015workflows:
1016  version: 2
1017  test:
1018    jobs:
1019      # Pre-checking before running all jobs
1020      - pre_check
1021      - ccheck
1022
1023      # Build necessary base venvs for integration tests
1024      - build_base_venvs
1025
1026      # Docs
1027      - build_docs: *requires_pre_check
1028
1029      # Integration test suites
1030      - aiobotocore: *requires_base_venvs
1031      - aiohttp: *requires_base_venvs
1032      - aiopg: *requires_base_venvs
1033      - asyncio: *requires_base_venvs
1034      - algoliasearch: *requires_base_venvs
1035      - asgi: *requires_base_venvs
1036      - benchmarks: *requires_base_venvs
1037      - boto: *requires_base_venvs
1038      - bottle: *requires_base_venvs
1039      - cassandra: *requires_base_venvs
1040      - celery: *requires_base_venvs
1041      - cherrypy: *requires_base_venvs
1042      - consul: *requires_base_venvs
1043      - dbapi: *requires_base_venvs
1044      - ddtracerun: *requires_base_venvs
1045      - django: *requires_base_venvs
1046      - django_hosts: *requires_base_venvs
1047      - djangorestframework: *requires_base_venvs
1048      - dogpile_cache: *requires_base_venvs
1049      - elasticsearch: *requires_base_venvs
1050      - falcon: *requires_base_venvs
1051      - fastapi: *requires_base_venvs
1052      - flask: *requires_base_venvs
1053      - futures: *requires_base_venvs
1054      - gevent: *requires_base_venvs
1055      - grpc: *requires_base_venvs
1056      - httplib: *requires_base_venvs
1057      - httpx: *requires_base_venvs
1058      - integration_agent5: *requires_base_venvs
1059      - integration_agent: *requires_base_venvs
1060      - integration_testagent: *requires_base_venvs
1061      - vendor: *requires_base_venvs
1062      - profile: *requires_base_venvs
1063      - jinja2: *requires_base_venvs
1064      - kombu: *requires_base_venvs
1065      - mako: *requires_base_venvs
1066      - mariadb: *requires_base_venvs
1067      - molten: *requires_base_venvs
1068      - mongoengine: *requires_base_venvs
1069      - mysqlconnector: *requires_base_venvs
1070      - mysqlpython: *requires_base_venvs
1071      - opentracer: *requires_base_venvs
1072      - psycopg: *requires_base_venvs
1073      - pylibmc: *requires_base_venvs
1074      - pylons: *requires_base_venvs
1075      - pymemcache: *requires_base_venvs
1076      - pymongo: *requires_base_venvs
1077      - pymysql: *requires_base_venvs
1078      - pynamodb: *requires_base_venvs
1079      - pyodbc: *requires_base_venvs
1080      - pyramid: *requires_base_venvs
1081      - pytest: *requires_base_venvs
1082      - aredis: *requires_base_venvs
1083      - yaaredis: *requires_base_venvs
1084      - redis: *requires_base_venvs
1085      - rediscluster: *requires_base_venvs
1086      - requests: *requires_base_venvs
1087      - rq: *requires_base_venvs
1088      - sanic: *requires_base_venvs
1089      - snowflake: *requires_base_venvs
1090      - starlette: *requires_base_venvs
1091      - sqlalchemy: *requires_base_venvs
1092      - sqlite3: *requires_base_venvs
1093      - test_logging: *requires_base_venvs
1094      - tornado: *requires_base_venvs
1095      - tracer: *requires_base_venvs
1096      - urllib3: *requires_base_venvs
1097      - vertica: *requires_base_venvs
1098      - wsgi: *requires_base_venvs
1099
1100      # Final reports
1101      - coverage_report: *requires_tests
1102