commit f5b4c2386141ebfc0b6844ecbea0c341b75969c0 Author: William Grzybowski Date: Mon Sep 23 21:44:48 2019 +0200 Import sentry-python_0.12.2.orig.tar.gz [dgit import orig sentry-python_0.12.2.orig.tar.gz] diff --git a/.craft.yml b/.craft.yml new file mode 100644 index 0000000..6da0897 --- /dev/null +++ b/.craft.yml @@ -0,0 +1,16 @@ +--- +minVersion: '0.5.1' +github: + owner: getsentry + repo: sentry-python +targets: + - name: pypi + - name: github + - name: gh-pages + - name: registry + type: sdk + config: + canonical: pypi:sentry-sdk + +changelog: CHANGES.md +changelogPolicy: simple diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..8336f8c --- /dev/null +++ b/.flake8 @@ -0,0 +1,6 @@ +[flake8] +ignore = E203, E266, E501, W503, E402, E731, C901, B950 +max-line-length = 80 +max-complexity = 18 +select = B,C,E,F,W,T4,B9 +exclude=checkouts,lol*,.tox diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3d55dc9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,24 @@ +*.pyc +*.log +*.egg +*.db +*.pid +.python-version +.coverage* +.DS_Store +.tox +pip-log.txt +*.egg-info +/build +/dist +.cache +.idea +.eggs +venv +.venv +.vscode/tags +.pytest_cache +.hypothesis +semaphore +pip-wheel-metadata +.mypy_cache diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..f6d010c --- /dev/null +++ b/.travis.yml @@ -0,0 +1,71 @@ +language: python + +python: + - "2.7" + - "pypy" + - "3.4" + - "3.5" + - "3.6" + +env: + - SENTRY_PYTHON_TEST_POSTGRES_USER=postgres SENTRY_PYTHON_TEST_POSTGRES_NAME=travis_ci_test + +cache: + pip: true + cargo: true + +branches: + only: + - master + - /^release\/.+$/ + +matrix: + allow_failures: + - python: "3.8-dev" + + include: + - python: "3.7" + dist: xenial + - python: "3.8-dev" + dist: xenial + + - name: Linting + python: "3.6" + install: + - pip install tox + script: tox -e linters + - python: "3.6" + name: Distribution packages + install: false + script: make travis-upload-dist + - python: "3.6" + name: Build documentation + install: false + script: make travis-upload-docs + +before_script: + - psql -c 'create database travis_ci_test;' -U postgres + - psql -c 'create database test_travis_ci_test;' -U postgres + +services: + - postgresql + +install: + - pip install tox + - pip install codecov + - bash scripts/download-semaphore.sh + +script: + - coverage erase + - ./scripts/runtox.sh '' --cov=sentry_sdk --cov-report= --cov-branch + - codecov --file .coverage* + +notifications: + webhooks: + urls: + - https://zeus.ci/hooks/7ebb3060-90d8-11e8-aa04-0a580a282e07/public/provider/travis/webhook + on_success: always + on_failure: always + on_start: always + on_cancel: always + on_error: always diff --git a/CHANGES.md b/CHANGES.md new file mode 100644 index 0000000..30311b0 --- /dev/null +++ b/CHANGES.md @@ -0,0 +1,425 @@ +# Changelog and versioning + +## Versioning Policy + +This project follows [semver](https://semver.org/), with three additions: + +* Semver says that major version `0` can include breaking changes at any time. + Still, it is common practice to assume that only `0.x` releases (minor + versions) can contain breaking changes while `0.x.y` releases (patch + versions) are used for backwards-compatible changes (bugfixes and features). + This project also follows that practice. + +* All undocumented APIs are considered internal. They are not part of this + contract. + +* Certain features (e.g. integrations) may be explicitly called out as + "experimental" or "unstable" in the documentation. 
They come with their own + versioning policy described in the documentation. + +We recommend pinning your version requirements against `0.x.*` or `0.x.y`. +Either one of the following is fine: + +``` +sentry-sdk>=0.10.0,<0.11.0 +sentry-sdk==0.10.1 +``` + +A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. + +## 0.12.2 + +* Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets. + +## 0.12.1 + +* Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues. + +## 0.12.0 + +* Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved in SDK development and ask questions. +* Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time. +* APM: Add spans for more methods on `subprocess.Popen` objects. +* APM: Add spans for Django middlewares. +* APM: Add spans for ASGI requests. +* Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.** + +## 0.11.2 + +* Fix a bug where the SDK would throw an exception on shutdown when running under eventlet. +* Add missing data to Redis breadcrumbs. + +## 0.11.1 + +* Remove a faulty assertion (observed in an environment with Django Channels and ASGI). + +## 0.11.0 + +* Fix type hints for the logging integration. Thanks Steven Dignam! +* Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita! +* Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li! +* Fix a series of bugs in the stdlib integration that broke usage of `subprocess`. +* More instrumentation for APM. +* New integration for SQLAlchemy (creates breadcrumbs from queries). +* New (experimental) integration for Apache Beam. +* Fix a bug in the `LoggingIntegration` that would send breadcrumb timestamps in the wrong timezone. +* The `AiohttpIntegration` now sets the event's transaction name. +* Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events. + +## 0.10.2 + +* Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash. +* Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels. +* Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration. +* Fix a memory leak in the new tracing feature when it is not enabled. + +## 0.10.1 + +* Fix bug where the SDK would yield a deprecation warning about + `collections.abc` vs `collections`. +* Fix bug in stdlib integration that would cause spawned subprocesses to not + inherit the environment variables from the parent process. + +## 0.10.0 + +* Massive refactor in preparation for tracing.
There are no intentional breaking + changes, but there is a risk of breakage (hence the minor version bump). Two + new client options `traces_sample_rate` and `traceparent_v2` have been added. + Do not change the defaults in production, they will bring your application + down or at least fill your Sentry project up with nonsense events. + +## 0.9.5 + +* Do not use ``getargspec`` on Python 3 to evade deprecation + warning. + +## 0.9.4 + +* Revert a change in 0.9.3 that prevented passing a ``unicode`` + string as DSN to ``init()``. + +## 0.9.3 + +* Add type hints for ``init()``. +* Include user agent header when sending events. + +## 0.9.2 + +* Fix a bug in the Django integration that would prevent the user + from initializing the SDK at the top of `settings.py`. + + This bug was introduced in 0.9.1 for all Django versions, but has been there + for much longer for Django 1.6 in particular. + +## 0.9.1 + +* Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to + leak event data between requests. +* Fix a bug where the GNU backtrace integration would not parse certain frames. +* Fix a bug where the SDK would not pick up request bodies for Django Rest + Framework based apps. +* Remove a few more headers containing sensitive data per default. +* Various improvements to type hints. Thanks Ran Benita! +* Add a event hint to access the log record from `before_send`. +* Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican! +* Fix distribution information for mypy support (add `py.typed` file). Thanks + Ran Benita! + +## 0.9.0 + +* The SDK now captures `SystemExit` and other `BaseException`s when coming from + within a WSGI app (Flask, Django, ...) +* Pyramid: No longer report an exception if there exists an exception view for + it. + +## 0.8.1 + +* Fix infinite recursion bug in Celery integration. + +## 0.8.0 + +* Add the always_run option in excepthook integration. +* Fix performance issues when attaching large data to events. This is not + really intended to be a breaking change, but this release does include a + rewrite of a larger chunk of code, therefore the minor version bump. + +## 0.7.14 + +* Fix crash when using Celery integration (`TypeError` when using + `apply_async`). + +## 0.7.13 + +* Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry. +* Add experimental support for tracing PoC. + +## 0.7.12 + +* Read from `X-Real-IP` for user IP address. +* Fix a bug that would not apply in-app rules for attached callstacks. +* It's now possible to disable automatic proxy support by passing + `http_proxy=""`. Thanks Marco Neumann! + +## 0.7.11 + +* Fix a bug that would send `errno` in an invalid format to the server. +* Fix import-time crash when running Python with `-O` flag. +* Fix a bug that would prevent the logging integration from attaching `extra` + keys called `data`. +* Fix order in which exception chains are reported to match Raven behavior. +* New integration for the Falcon web framework. Thanks to Jacob Magnusson! + +## 0.7.10 + +* Add more event trimming. +* Log Sentry's response body in debug mode. +* Fix a few bad typehints causing issues in IDEs. +* Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. + redirects) as errors. +* Fix a bug that would prevent use of `in_app_exclude` without + setting `in_app_include`. +* Fix a bug where request bodies of Django Rest Framework apps were not captured. +* Suppress errors during SQL breadcrumb capturing in Django + integration. 
Also change order in which formatting strategies + are tried. + +## 0.7.9 + +* New integration for the Bottle web framework. Thanks to Stepan Henek! +* Self-protect against broken mapping implementations and other broken reprs + instead of dropping all local vars from a stacktrace. Thanks to Marco + Neumann! + +## 0.7.8 + +* Add support for Sanic versions 18 and 19. +* Fix a bug that caused an SDK crash when using composed SQL from psycopg2. + +## 0.7.7 + +* Fix a bug that would not capture request bodies if they were empty JSON + arrays, objects or strings. +* New GNU backtrace integration parses stacktraces from exception messages and + appends them to the existing stacktrace. +* Capture Tornado formdata. +* Support Python 3.6 in Sanic and AIOHTTP integration. +* Clear breadcrumbs before starting a new request. +* Fix a bug in the Celery integration that would drop pending events during + worker shutdown (particularly an issue when running with `max_tasks_per_child + = 1`) +* Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the + WSGI environment or other data that we're also trying to serialize. + +## 0.7.6 + +* Fix a bug where artificial frames for Django templates would not be marked as + in-app and would always appear as the innermost frame. Implement a heuristic + to show the template frame closer to the `render` or `parse` invocation. + +## 0.7.5 + +* Fix a bug in the Tornado integration that would send broken cookies to the server. +* Fix a bug in the logging integration that would ignore the client + option `with_locals`. + +## 0.7.4 + +* Read release and environment from process environment like the Raven SDK + does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`. +* Fix a bug in the `serverless` integration where it would not push a new scope + for each function call (leaking tags and other things across calls). +* Experimental support for type hints. + +## 0.7.3 + +* Fix crash in AIOHTTP integration when integration was set up but disabled. +* Flask integration now adds usernames and email addresses based on the protocol + Flask-User defines on top of Flask-Login. +* New threading integration catches exceptions from crashing threads. +* New method `flush` on hubs and clients. New global `flush` function. +* Add decorator for serverless functions to fix common problems in those + environments. +* Fix a bug in the logging integration where using explicit handlers required + enabling the integration. + +## 0.7.2 + +* Fix `celery.exceptions.Retry` spamming in Celery integration. + +## 0.7.1 + +* Fix `UnboundLocalError` crash in Celery integration. + +## 0.7.0 + +* Properly display chained exceptions (PEP-3134). +* Rewrite celery integration to monkeypatch instead of using signals due to + bugs in Celery 3's signal handling. The Celery scope is also now available in + prerun and postrun signals. +* Fix Tornado integration to work with Tornado 6. +* Do not evaluate Django `QuerySet` when trying to capture local variables. + Also an internal hook was added to overwrite `repr` for local vars. + +## 0.6.9 + +* Second attempt at fixing the bug that was supposed to be fixed in 0.6.8. + + > No longer access arbitrary sequences in local vars due to possible side effects. + +## 0.6.8 + +* No longer access arbitrary sequences in local vars due to possible side effects. + +## 0.6.7 + +* Source code of Django templates is now displayed in stackframes, like Jinja templates in Flask already were.
+* Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime. +* Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors. +* Fix a bug where a crashing `before_send` would crash the SDK and app. +* Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK. + +## 0.6.6 + +* Un-break API of internal `Auth` object that we use in Sentry itself. + +## 0.6.5 + +* Capture WSGI request data eagerly to save memory and avoid issues with uWSGI. +* Ability to use subpaths in DSN. +* Ignore `django.request` logger. + +## 0.6.4 + +* Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps. + +## 0.6.3 + +* New integration for Tornado. +* Fix request data in Django, Flask and other WSGI frameworks leaking between events. +* Fix infinite recursion when sending more events in `before_send`. + +## 0.6.2 + +* Fix crash in AWS Lambda integration when using Zappa. This only silences the error; the underlying bug is still in Zappa. + +## 0.6.1 + +* New integration for aiohttp-server. +* Fix crash when reading hostname in broken WSGI environments. + +## 0.6.0 + +* Fix bug where a 429 without Retry-After would not be honored. +* Fix bug where proxy setting would not fall back to `http_proxy` for HTTPS traffic. +* A WSGI middleware is now available for catching errors and adding context about the current request to them. +* Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available. +* The Python 3.7 runtime for AWS Lambda is now supported. +* Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded. +* Logging an exception will no longer add the exception as a breadcrumb to the exception's own event. + +## 0.5.5 + +* New client option `ca_certs`. +* Fix crash with Django and psycopg2. + +## 0.5.4 + +* Fix deprecation warning in relation to the `collections` stdlib module. +* Fix bug that would crash Django and Flask when streaming responses fail halfway through. + +## 0.5.3 + +* Fix bug where using `push_scope` with a callback would not pop the scope. +* Fix crash when initializing the SDK in `push_scope`. +* Fix bug where IP addresses were sent when `send_default_pii=False`. + +## 0.5.2 + +* Fix bug where events sent through the RQ integration were sometimes lost. +* Remove a deprecation warning about usage of `logger.warn`. +* Fix bug where large frame local variables would lead to the event being rejected by Sentry. + +## 0.5.1 + +* Integration for Redis Queue (RQ). + +## 0.5.0 + +* Fix a bug that would omit several debug logs during SDK initialization. +* Fix an issue that sent an event key `""` that Sentry wouldn't understand. +* **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other. +* Fix a bug in the Sanic integration that would report the exception behind any HTTP error code. +* Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`. +* Additional attributes on log records are now put into `extra`. +* Integration for Pyramid. +* `sys.argv` is put into `extra` automatically. + +## 0.4.3 + +* Fix a bug that would leak WSGI responses. + +## 0.4.2 + +* Fix a bug in the Sanic integration that would leak data between requests. +* Fix a bug that would hide all debug logging happening inside of the built-in transport.
+* Fix a bug that would report errors for typos in Django's shell. + +## 0.4.1 + +* Fix bug that would only show filenames in stacktraces but not the parent + directories. + +## 0.4.0 + +* Changed how integrations are initialized. Integrations are now + configured and enabled per-client. + +## 0.3.11 + +* Fix issue with certain deployment tools and the AWS Lambda integration. + +## 0.3.10 + +* Set transactions for Django like in Raven. Which transaction behavior is used + can be configured. +* Fix a bug which would omit frame local variables from stacktraces in Celery. +* New option: `attach_stacktrace` + +## 0.3.9 + +* Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions. + +## 0.3.8 + +* Nicer log level for internal errors. + +## 0.3.7 + +* Remove `repos` configuration option. There was never a way to make use of + this feature. +* Fix a bug in `last_event_id`. +* Add Django SQL queries to breadcrumbs. +* Django integration won't set user attributes if they were already set. +* Report correct SDK version to Sentry. + +## 0.3.6 + +* Integration for Sanic + +## 0.3.5 + +* Integration for AWS Lambda +* Fix mojibake when encoding local variable values + +## 0.3.4 + +* Performance improvement when storing breadcrumbs + +## 0.3.3 + +* Fix crash when breadcrumbs had to be truncated + +## 0.3.2 + +* Fixed an issue where some paths were not properly sent as absolute paths diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..8444020 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,60 @@ +# How to contribute to the Sentry Python SDK + +`sentry-sdk` is an ordinary Python package. You can install it with `pip +install -e .` into some virtualenv, edit the source code and test out your +changes manually. + +## Community + +The public-facing channels for support and development of Sentry SDKs can be found on [Discord](https://discord.gg/Ww9hbqr). + +## Running tests and linters + +Make sure you have `virtualenv` installed, and the Python versions you care +about. You should have Python 2.7 and the latest Python 3 installed. + +You don't need to `workon` or `activate` anything; the `Makefile` will create +one for you. Run `make` or `make help` to list commands. + +## Releasing a new version + +We use [craft](https://github.com/getsentry/craft#python-package-index-pypi) to +release new versions. You need credentials for the `getsentry` PyPI user, and +must have `twine` installed globally. + +The usual release process goes like this: + +1. Go through the git log and write a new entry into `CHANGES.md`, then commit to master +2. `craft p a.b.c` +3. `craft pp a.b.c` + +## Adding a new integration (checklist) + +1. Write the integration. + + * Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for it to work. Aim for zero configuration. + + * Everybody monkeypatches. That means: + + * Make sure to think about conflicts with other monkeypatches when monkeypatching. + + * You don't need to feel bad about it. + + * Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. + + * Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). + +2.
Write the [docs](https://github.com/getsentry/sentry-docs). Answer the following questions: + + * What does your integration do? Split in two sections: Executive summary at top and exact behavior further down. + + * Which version of the SDK supports which versions of the modules it hooks into? + + * One code example with basic setup. + + * Make sure to add integration page to `python/index.md` (people forget to do that all the time). + + Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI. + +3. Merge docs after new version has been released (auto-deploys on merge). +4. (optional) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. May not be applicable or doable for all kinds of integrations. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..61555f1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,9 @@ +Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..86a2426 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +include LICENSE +include sentry_sdk/py.typed diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..d5dd833 --- /dev/null +++ b/Makefile @@ -0,0 +1,75 @@ +SHELL = /bin/bash + +VENV_PATH = .venv + +help: + @echo "Thanks for your interest in the Sentry Python SDK!" + @echo + @echo "make lint: Run linters" + @echo "make test: Run basic tests (not testing most integrations)" + @echo "make test-all: Run ALL tests (slow, closest to CI)" + @echo "make format: Run code formatters (destructive)" + @echo + @echo "Also make sure to read ./CONTRIBUTING.md" + @false + +.venv: + virtualenv -ppython3 $(VENV_PATH) + $(VENV_PATH)/bin/pip install tox + +dist: .venv + rm -rf dist build + $(VENV_PATH)/bin/python setup.py sdist bdist_wheel + +.PHONY: dist + +format: .venv + $(VENV_PATH)/bin/tox -e linters --notest + .tox/linters/bin/black . 
+.PHONY: format + +test: .venv + @$(VENV_PATH)/bin/tox -e py2.7,py3.7 +.PHONY: test + +test-all: .venv + @TOXPATH=$(VENV_PATH)/bin/tox sh ./scripts/runtox.sh +.PHONY: test-all + +check: lint test +.PHONY: check + +lint: .venv + @set -e && $(VENV_PATH)/bin/tox -e linters || ( \ + echo "================================"; \ + echo "Bad formatting? Run: make format"; \ + echo "================================"; \ + false) + +.PHONY: lint + +apidocs: .venv + @$(VENV_PATH)/bin/pip install --editable . + @$(VENV_PATH)/bin/pip install -U -r ./docs-requirements.txt + @$(VENV_PATH)/bin/sphinx-build -W -b html docs/ docs/_build +.PHONY: apidocs + +apidocs-hotfix: apidocs + @$(VENV_PATH)/bin/pip install ghp-import + @$(VENV_PATH)/bin/ghp-import -pf docs/_build +.PHONY: apidocs-hotfix + +install-zeus-cli: + npm install -g @zeus-ci/cli +.PHONY: install-zeus-cli + +travis-upload-docs: apidocs install-zeus-cli + cd docs/_build && zip -r gh-pages ./ + zeus upload -t "application/zip+docs" docs/_build/gh-pages.zip \ + || [[ ! "$(TRAVIS_BRANCH)" =~ ^release/ ]] +.PHONY: travis-upload-docs + +travis-upload-dist: dist install-zeus-cli + zeus upload -t "application/zip+wheel" dist/* \ + || [[ ! "$(TRAVIS_BRANCH)" =~ ^release/ ]] +.PHONY: travis-upload-dist diff --git a/README.md b/README.md new file mode 100644 index 0000000..9af579a --- /dev/null +++ b/README.md @@ -0,0 +1,42 @@ +

+ +# sentry-python - Sentry SDK for Python + +[![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python) +[![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk) +[![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA) + +This is the next line of the Python SDK for [Sentry](http://sentry.io/), intended to replace the `raven` package on PyPI. + +```python +from sentry_sdk import init, capture_message + +init("mydsn@sentry.io/123") + +capture_message("Hello World") # Will create an event. + +raise ValueError() # Will also create an event. +``` + +To learn more about how to use the SDK: + +- [Getting started with the new SDK](https://docs.sentry.io/quickstart/?platform=python) +- [Configuration options](https://docs.sentry.io/error-reporting/configuration/?platform=python) +- [Setting context (tags, user, extra information)](https://docs.sentry.io/enriching-error-data/context/?platform=python) +- [Integrations](https://docs.sentry.io/platforms/python/) + +Are you coming from raven-python? + +- [Cheatsheet: Migrating to the new SDK from Raven](https://forum.sentry.io/t/switching-to-sentry-python/4733) + +To learn about internals: + +- [API Reference](https://getsentry.github.io/sentry-python/) + +# License + +Licensed under the BSD license, see `LICENSE` diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..7d3de1e --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,79 @@ +# Python package +# Create and test a Python package on multiple Python versions. +# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more: +# https://docs.microsoft.com/azure/devops/pipelines/languages/python + +trigger: + - master + +pr: none + +resources: + containers: + - container: postgres + image: "postgres:9.6" + ports: + - 5432:5432 + +jobs: + - job: run_tests + displayName: Tests + pool: + vmImage: "Ubuntu-16.04" + services: + postgres: postgres + strategy: + matrix: + Python27: + python.version: "2.7" + Python34: + python.version: "3.4" + Python35: + python.version: "3.5" + Python36: + python.version: "3.6" + Python37: + python.version: "3.7" + # Python 3.8 and PyPy will be soon added to the base VM image: + # https://github.com/Microsoft/azure-pipelines-tasks/pull/9866 + Python38: + python.version: "3.8-dev" + PyPy2: + python.version: "pypy2" + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: "$(python.version)" + architecture: "x64" + + - script: | + set -eux + docker ps -a + docker images -a + # FIXME: theoretically we can run psql commands from a docker container, but + # name resolution is a bit tricky here + sudo apt install -y postgresql-client + psql -c 'create database travis_ci_test;' -U postgres -h localhost + psql -c 'create database test_travis_ci_test;' -U postgres -h localhost + displayName: "Create Postgres users" + + - script: | + set -eux + python --version + pip --version + pip install tox + pip install codecov + sh scripts/download-semaphore.sh + displayName: "Install dependencies" + + - script: | + set -eux + coverage erase + ./scripts/runtox.sh '' --cov=sentry_sdk --cov-report= --cov-branch + codecov --file .coverage* + env: + SENTRY_PYTHON_TEST_POSTGRES_USER: postgres + SENTRY_PYTHON_TEST_POSTGRES_NAME: travis_ci_test + AZURE_PYTHON_VERSION: "$(python.version)" + displayName: "Run tests" diff --git 
a/docs-requirements.txt b/docs-requirements.txt new file mode 100644 index 0000000..03a072a --- /dev/null +++ b/docs-requirements.txt @@ -0,0 +1,4 @@ +sphinx +sphinx-rtd-theme +git+https://github.com/agronholm/sphinx-autodoc-typehints +typed_ast diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000..e35d885 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ +_build diff --git a/docs/_static/.gitkeep b/docs/_static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 0000000..01bef3e --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,9 @@ +======== +Main API +======== + +.. inherited-members necessary because of hack for Client and init methods + +.. automodule:: sentry_sdk + :members: + :inherited-members: diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..80fb114 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- + +import os +import sys + +import typing + +typing.TYPE_CHECKING = True + +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +sys.path.insert(0, os.path.abspath("..")) + +# -- Project information ----------------------------------------------------- + +project = u"sentry-python" +copyright = u"2019, Sentry Team and Contributors" +author = u"Sentry Team and Contributors" + +release = "0.12.2" +version = ".".join(release.split(".")[:2]) # The short X.Y version. + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx_autodoc_typehints", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", + "sphinx.ext.intersphinx", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = ".rst" + +# The master toctree document. +master_doc = "index" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [u"_build", "Thumbs.db", ".DS_Store"] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# + +on_rtd = os.environ.get("READTHEDOCS", None) == "True" + +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = "sentry-pythondoc" + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "sentry-python.tex", + u"sentry-python Documentation", + u"Sentry Team and Contributors", + "manual", + ) +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, "sentry-python", u"sentry-python Documentation", [author], 1)] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "sentry-python", + u"sentry-python Documentation", + author, + "sentry-python", + "One line description of project.", + "Miscellaneous", + ) +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ["search.html"] + +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..ade1dc0 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,11 @@ +===================================== +sentry-python - Sentry SDK for Python +===================================== + +This is the API documentation for `Sentry's Python SDK +`_. For full documentation and other resources +visit the `GitHub repository `_. + +.. toctree:: + api + integrations diff --git a/docs/integrations.rst b/docs/integrations.rst new file mode 100644 index 0000000..a04d99d --- /dev/null +++ b/docs/integrations.rst @@ -0,0 +1,14 @@ +============ +Integrations +============ + +Logging +======= + +.. module:: sentry_sdk.integrations.logging + +.. autofunction:: ignore_logger + +.. 
autoclass:: EventHandler + +.. autoclass:: BreadcrumbHandler diff --git a/examples/basic.py b/examples/basic.py new file mode 100644 index 0000000..e6d928b --- /dev/null +++ b/examples/basic.py @@ -0,0 +1,35 @@ +import sentry_sdk +from sentry_sdk.integrations.excepthook import ExcepthookIntegration +from sentry_sdk.integrations.atexit import AtexitIntegration +from sentry_sdk.integrations.dedupe import DedupeIntegration +from sentry_sdk.integrations.stdlib import StdlibIntegration + + +sentry_sdk.init( + dsn="https://@sentry.io/", + default_integrations=False, + integrations=[ + ExcepthookIntegration(), + AtexitIntegration(), + DedupeIntegration(), + StdlibIntegration(), + ], + environment="Production", + release="1.0.0", + send_default_pii=False, + max_breadcrumbs=5, +) + +with sentry_sdk.push_scope() as scope: + scope.user = {"email": "john.doe@example.com"} + scope.set_tag("page_locale", "de-at") + scope.set_extra("request", {"id": "d5cf8a0fd85c494b9c6453c4fba8ab17"}) + scope.level = "warning" + sentry_sdk.capture_message("Something went wrong!") + +sentry_sdk.add_breadcrumb(category="auth", message="Authenticated user", level="info") + +try: + 1 / 0 +except Exception as e: + sentry_sdk.capture_exception(e) diff --git a/examples/tracing/README.md b/examples/tracing/README.md new file mode 100644 index 0000000..ae7b797 --- /dev/null +++ b/examples/tracing/README.md @@ -0,0 +1,14 @@ +To run this app: + +1. Have a Redis on the Redis default port (if you have Sentry running locally, + you probably already have this) +2. `pip install sentry-sdk flask rq` +3. `FLASK_APP=tracing flask run` +4. `FLASK_APP=tracing flask worker` +5. Go to `http://localhost:5000/` and enter a base64-encoded string (one is prefilled) +6. Hit submit, wait for heavy computation to end +7. `cat events | python traceviewer.py | dot -T svg > events.svg` +8. `open events.svg` + +The last two steps are for viewing the traces. Nothing gets sent to Sentry +right now because Sentry does not deal with this data yet. 
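The `traceviewer.py` script referenced in step 7 is not part of this hunk. As a hypothetical sketch of what that step relies on, a script along these lines could read the newline-delimited transaction events (see the `events` file below) and emit a Graphviz graph for `dot` to render into `events.svg`. The field names (`contexts.trace.span_id`, `parent_span_id`, `trace_id`, `transaction`) are taken from the event format shown below; everything else is illustrative, not the actual implementation.

```python
# Hypothetical sketch only -- not the real examples/tracing/traceviewer.py.
# Reads transaction events (one JSON object per line) from stdin and prints
# a Graphviz digraph that connects each span to its parent span.
import json
import sys


def main():
    print("digraph mytrace {")
    for line in sys.stdin:
        line = line.strip()
        if not line:
            continue
        event = json.loads(line)
        trace = event["contexts"]["trace"]
        # One node per transaction, labelled like "index (a0fa88037...)".
        label = "%s (%s)" % (event.get("transaction", "?"), trace["trace_id"])
        print('  "%s" [label="%s"];' % (trace["span_id"], label))
        parent = trace.get("parent_span_id")
        if parent:
            # Edge from the parent span to this transaction's span.
            print('  "%s" -> "%s";' % (parent, trace["span_id"]))
    print("}")


if __name__ == "__main__":
    main()
```

Piping the `events` file through such a script and then through `dot -T svg` would produce a graph along the lines of the committed `events.svg`.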
diff --git a/examples/tracing/events b/examples/tracing/events new file mode 100644 index 0000000..f68ae2b --- /dev/null +++ b/examples/tracing/events @@ -0,0 +1,10 @@ +{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "968cff94913ebb07"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Upgrade-Insecure-Requests": "1", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "f9f4b21dd9da4c389426c1ffd2b62410", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "8eb30d5ae5f3403ba3a036e696111ec3", "span_id": "97e894108ff7a8cd"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", 
"statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1c71c7cb32934550bb49f05b6c2d4052", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "b7627895a90b41718be82d3ad21ab2f4", "span_id": "9fa95b4ffdcbe177"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", 
"setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1430ad5b0a0d45dca3f02c10271628f9", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "1636fdb33db84e7c9a4e606c1b176971", "span_id": "b682a29ead55075f"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js.map", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive"}}, "event_id": "72b1224307294e0fb6d6b1958076c4cc", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "compute", "server_name": "apfeltasche.local", "extra": 
{"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "946edde6ee421874"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/compute/aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c72fd945c1174140a00bdbf6f6ed8fc5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "bf5be759039ede9a"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", 
"pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "e8c17b0cbe2045758aaffc2f11672fab", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "b2d56249f7fdf327"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": 
"40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "6577f8056383427d85df5b33bf9ccc2c", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "ac62ff8ae1b2eda6"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c03dfbab8a8145eeaa0d1a1adfcfcaa5", "platform": "python", "spans": [], 
"breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], "description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": "http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": [{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "ty": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "ty": "log", "timestamp": "2019-06-14T14:01:41Z", "level": 
"info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9d91c6558b2e4c06"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "339cfc84adf0405986514c808afb0f68", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} diff --git a/examples/tracing/events.svg b/examples/tracing/events.svg new file mode 100644 index 0000000..33f9c98 --- /dev/null +++ b/examples/tracing/events.svg @@ -0,0 +1,439 @@ + + + + + + +mytrace + + + +213977312221895837199412816265326724789 + +trace:index (a0fa8803753e40fd8124b21eeb2986b5) + + + +10848326615985732359 + +span:index (968cff94913ebb07) + + + 
+213977312221895837199412816265326724789->10848326615985732359 + + + + + +10695730148961032308 + +span:compute (946edde6ee421874) + + + +213977312221895837199412816265326724789->10695730148961032308 + + + + + +13788869053623754394 + +span:wait (bf5be759039ede9a) + + + +213977312221895837199412816265326724789->13788869053623754394 + + + + + +12886313978623292199 + +span:wait (b2d56249f7fdf327) + + + +213977312221895837199412816265326724789->12886313978623292199 + + + + + +12421771694198418854 + +span:wait (ac62ff8ae1b2eda6) + + + +213977312221895837199412816265326724789->12421771694198418854 + + + + + +10129474377767673784 + +span:http://httpbin.org/base64/aGVsbG8gd29ybGQK GET (8c931f4740435fb8) + + + +213977312221895837199412816265326724789->10129474377767673784 + + + + + +11252927259328145570 + +span:tracing.decode_base64 (9c2a6db8c79068a2) + + + +213977312221895837199412816265326724789->11252927259328145570 + + + + + +11354074206287318022 + +span:wait (9d91c6558b2e4c06) + + + +213977312221895837199412816265326724789->11354074206287318022 + + + + + +189680067412161401408211119957991300803 + +trace:static (8eb30d5ae5f3403ba3a036e696111ec3) + + + +10946161693179750605 + +span:static (97e894108ff7a8cd) + + + +189680067412161401408211119957991300803->10946161693179750605 + + + + + +243760014067241244567037757667822711540 + +trace:index (b7627895a90b41718be82d3ad21ab2f4) + + + +11504827122213183863 + +span:index (9fa95b4ffdcbe177) + + + +243760014067241244567037757667822711540->11504827122213183863 + + + + + +29528545588201242414770090507008174449 + +trace:static (1636fdb33db84e7c9a4e606c1b176971) + + + +13151252664271832927 + +span:static (b682a29ead55075f) + + + +29528545588201242414770090507008174449->13151252664271832927 + + + + + +10695730148961032308->10848326615985732359 + + + + + +10695730148961032308->10946161693179750605 + + + + + +10695730148961032308->11504827122213183863 + + + + + +10695730148961032308->13151252664271832927 + + + + + +10695730148961032308->11252927259328145570 + + + + + +13610234804785734989 + +13610234804785734989 + + + +13610234804785734989->10695730148961032308 + + + + + +13610234804785734989->13788869053623754394 + + + + + +13610234804785734989->12886313978623292199 + + + + + +13610234804785734989->12421771694198418854 + + + + + +13610234804785734989->11354074206287318022 + + + + + +13788869053623754394->10848326615985732359 + + + + + +13788869053623754394->10946161693179750605 + + + + + +13788869053623754394->11504827122213183863 + + + + + +13788869053623754394->13151252664271832927 + + + + + +12886313978623292199->10848326615985732359 + + + + + +12886313978623292199->10946161693179750605 + + + + + +12886313978623292199->11504827122213183863 + + + + + +12886313978623292199->13151252664271832927 + + + + + +12421771694198418854->10848326615985732359 + + + + + +12421771694198418854->10946161693179750605 + + + + + +12421771694198418854->11504827122213183863 + + + + + +12421771694198418854->13151252664271832927 + + + + + +12421771694198418854->10695730148961032308 + + + + + +12421771694198418854->13788869053623754394 + + + + + +12421771694198418854->12886313978623292199 + + + + + +10129474377767673784->10848326615985732359 + + + + + +10129474377767673784->10946161693179750605 + + + + + +10129474377767673784->11504827122213183863 + + + + + +10129474377767673784->13151252664271832927 + + + + + +10129474377767673784->10695730148961032308 + + + + + +10129474377767673784->13788869053623754394 + + + + + +10129474377767673784->12886313978623292199 + + + + + 
+11252927259328145570->10848326615985732359 + + + + + +11252927259328145570->10946161693179750605 + + + + + +11252927259328145570->11504827122213183863 + + + + + +11252927259328145570->13151252664271832927 + + + + + +11252927259328145570->10129474377767673784 + + + + + +11354074206287318022->10848326615985732359 + + + + + +11354074206287318022->10946161693179750605 + + + + + +11354074206287318022->11504827122213183863 + + + + + +11354074206287318022->13151252664271832927 + + + + + +11354074206287318022->10695730148961032308 + + + + + +11354074206287318022->13788869053623754394 + + + + + +11354074206287318022->12886313978623292199 + + + + + diff --git a/examples/tracing/static/tracing.js b/examples/tracing/static/tracing.js new file mode 100644 index 0000000..ad4dc9a --- /dev/null +++ b/examples/tracing/static/tracing.js @@ -0,0 +1,519 @@ +(function (__window) { +var exports = {}; +Object.defineProperty(exports, '__esModule', { value: true }); + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 + +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. + +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; + +function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** An error emitted by Sentry SDKs and related utilities. 
*/ +var SentryError = /** @class */ (function (_super) { + __extends(SentryError, _super); + function SentryError(message) { + var _newTarget = this.constructor; + var _this = _super.call(this, message) || this; + _this.message = message; + // tslint:disable:no-unsafe-any + _this.name = _newTarget.prototype.constructor.name; + Object.setPrototypeOf(_this, _newTarget.prototype); + return _this; + } + return SentryError; +}(Error)); + +/** + * Checks whether given value's type is one of a few Error or Error-like + * {@link isError}. + * + * @param wat A value to be checked. + * @returns A boolean representing the result. + */ +/** + * Checks whether given value's type is an regexp + * {@link isRegExp}. + * + * @param wat A value to be checked. + * @returns A boolean representing the result. + */ +function isRegExp(wat) { + return Object.prototype.toString.call(wat) === '[object RegExp]'; +} + +/** + * Requires a module which is protected _against bundler minification. + * + * @param request The module path to resolve + */ +/** + * Checks whether we're in the Node.js or Browser environment + * + * @returns Answer to given question + */ +function isNodeEnv() { + // tslint:disable:strict-type-predicates + return Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]'; +} +var fallbackGlobalObject = {}; +/** + * Safely get global scope object + * + * @returns Global scope object + */ +function getGlobalObject() { + return (isNodeEnv() + ? global + : typeof window !== 'undefined' + ? window + : typeof self !== 'undefined' + ? self + : fallbackGlobalObject); +} +/** JSDoc */ +function consoleSandbox(callback) { + var global = getGlobalObject(); + var levels = ['debug', 'info', 'warn', 'error', 'log', 'assert']; + if (!('console' in global)) { + return callback(); + } + var originalConsole = global.console; + var wrappedLevels = {}; + // Restore all wrapped console methods + levels.forEach(function (level) { + if (level in global.console && originalConsole[level].__sentry__) { + wrappedLevels[level] = originalConsole[level].__sentry_wrapped__; + originalConsole[level] = originalConsole[level].__sentry_original__; + } + }); + // Perform callback manipulations + var result = callback(); + // Revert restoration to wrapped state + Object.keys(wrappedLevels).forEach(function (level) { + originalConsole[level] = wrappedLevels[level]; + }); + return result; +} + +// TODO: Implement different loggers for different environments +var global$1 = getGlobalObject(); +/** Prefix for logging strings */ +var PREFIX = 'Sentry Logger '; +/** JSDoc */ +var Logger = /** @class */ (function () { + /** JSDoc */ + function Logger() { + this._enabled = false; + } + /** JSDoc */ + Logger.prototype.disable = function () { + this._enabled = false; + }; + /** JSDoc */ + Logger.prototype.enable = function () { + this._enabled = true; + }; + /** JSDoc */ + Logger.prototype.log = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (!this._enabled) { + return; + } + consoleSandbox(function () { + global$1.console.log(PREFIX + "[Log]: " + args.join(' ')); // tslint:disable-line:no-console + }); + }; + /** JSDoc */ + Logger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (!this._enabled) { + return; + } + consoleSandbox(function () { + global$1.console.warn(PREFIX + "[Warn]: " + args.join(' ')); // tslint:disable-line:no-console + }); + 
}; + /** JSDoc */ + Logger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (!this._enabled) { + return; + } + consoleSandbox(function () { + global$1.console.error(PREFIX + "[Error]: " + args.join(' ')); // tslint:disable-line:no-console + }); + }; + return Logger; +}()); +// Ensure we only have a single logger instance, even if multiple versions of @sentry/utils are being used +global$1.__SENTRY__ = global$1.__SENTRY__ || {}; +var logger = global$1.__SENTRY__.logger || (global$1.__SENTRY__.logger = new Logger()); + +// tslint:disable:no-unsafe-any + +/** + * Wrap a given object method with a higher-order function + * + * @param source An object that contains a method to be wrapped. + * @param name A name of method to be wrapped. + * @param replacement A function that should be used to wrap a given method. + * @returns void + */ +function fill(source, name, replacement) { + if (!(name in source)) { + return; + } + var original = source[name]; + var wrapped = replacement(original); + // Make sure it's a function first, as we need to attach an empty prototype for `defineProperties` to work + // otherwise it'll throw "TypeError: Object.defineProperties called on non-object" + // tslint:disable-next-line:strict-type-predicates + if (typeof wrapped === 'function') { + try { + wrapped.prototype = wrapped.prototype || {}; + Object.defineProperties(wrapped, { + __sentry__: { + enumerable: false, + value: true, + }, + __sentry_original__: { + enumerable: false, + value: original, + }, + __sentry_wrapped__: { + enumerable: false, + value: wrapped, + }, + }); + } + catch (_Oo) { + // This can throw if multiple fill happens on a global object like XMLHttpRequest + // Fixes https://github.com/getsentry/sentry-javascript/issues/2043 + } + } + source[name] = wrapped; +} + +// Slightly modified (no IE8 support, ES6) and transcribed to TypeScript + +/** + * Checks if the value matches a regex or includes the string + * @param value The string value to be checked against + * @param pattern Either a regex or a string that must be contained in value + */ +function isMatchingPattern(value, pattern) { + if (isRegExp(pattern)) { + return pattern.test(value); + } + if (typeof pattern === 'string') { + return value.includes(pattern); + } + return false; +} + +/** + * Tells whether current environment supports Fetch API + * {@link supportsFetch}. + * + * @returns Answer to the given question. + */ +function supportsFetch() { + if (!('fetch' in getGlobalObject())) { + return false; + } + try { + // tslint:disable-next-line:no-unused-expression + new Headers(); + // tslint:disable-next-line:no-unused-expression + new Request(''); + // tslint:disable-next-line:no-unused-expression + new Response(); + return true; + } + catch (e) { + return false; + } +} +/** + * Tells whether current environment supports Fetch API natively + * {@link supportsNativeFetch}. + * + * @returns Answer to the given question. 
+ */ +function supportsNativeFetch() { + if (!supportsFetch()) { + return false; + } + var global = getGlobalObject(); + return global.fetch.toString().indexOf('native') !== -1; +} + +/** SyncPromise internal states */ +var States; +(function (States) { + /** Pending */ + States["PENDING"] = "PENDING"; + /** Resolved / OK */ + States["RESOLVED"] = "RESOLVED"; + /** Rejected / Error */ + States["REJECTED"] = "REJECTED"; +})(States || (States = {})); + +/** + * Tracing Integration + */ +var Tracing = /** @class */ (function () { + /** + * Constructor for Tracing + * + * @param _options TracingOptions + */ + function Tracing(_options) { + if (_options === void 0) { _options = {}; } + this._options = _options; + /** + * @inheritDoc + */ + this.name = Tracing.id; + if (!Array.isArray(_options.tracingOrigins) || _options.tracingOrigins.length === 0) { + consoleSandbox(function () { + var defaultTracingOrigins = ['localhost', /^\//]; + // @ts-ignore + console.warn('Sentry: You need to define `tracingOrigins` in the options. Set an array of urls or patterns to trace.'); + // @ts-ignore + console.warn("Sentry: We added a reasonable default for you: " + defaultTracingOrigins); + _options.tracingOrigins = defaultTracingOrigins; + }); + } + } + /** + * @inheritDoc + */ + Tracing.prototype.setupOnce = function (_, getCurrentHub) { + if (this._options.traceXHR !== false) { + this._traceXHR(getCurrentHub); + } + if (this._options.traceFetch !== false) { + this._traceFetch(getCurrentHub); + } + if (this._options.autoStartOnDomReady !== false) { + getGlobalObject().addEventListener('DOMContentLoaded', function () { + Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); + }); + getGlobalObject().document.onreadystatechange = function () { + if (document.readyState === 'complete') { + Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); + } + }; + } + }; + /** + * Starts a new trace + * @param hub The hub to start the trace on + * @param transaction Optional transaction + */ + Tracing.startTrace = function (hub, transaction) { + hub.configureScope(function (scope) { + scope.startSpan(); + scope.setTransaction(transaction); + }); + }; + /** + * JSDoc + */ + Tracing.prototype._traceXHR = function (getCurrentHub) { + if (!('XMLHttpRequest' in getGlobalObject())) { + return; + } + var xhrproto = XMLHttpRequest.prototype; + fill(xhrproto, 'open', function (originalOpen) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + // @ts-ignore + var self = getCurrentHub().getIntegration(Tracing); + if (self) { + self._xhrUrl = args[1]; + } + // tslint:disable-next-line: no-unsafe-any + return originalOpen.apply(this, args); + }; + }); + fill(xhrproto, 'send', function (originalSend) { + return function () { + var _this = this; + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + // @ts-ignore + var self = getCurrentHub().getIntegration(Tracing); + if (self && self._xhrUrl && self._options.tracingOrigins) { + var url_1 = self._xhrUrl; + var headers_1 = getCurrentHub().traceHeaders(); + // tslint:disable-next-line: prefer-for-of + var isWhitelisted = self._options.tracingOrigins.some(function (origin) { + return isMatchingPattern(url_1, origin); + }); + if (isWhitelisted && this.setRequestHeader) { + Object.keys(headers_1).forEach(function (key) { + _this.setRequestHeader(key, headers_1[key]); + }); + } + } + // tslint:disable-next-line: no-unsafe-any + return 
originalSend.apply(this, args); + }; + }); + }; + /** + * JSDoc + */ + Tracing.prototype._traceFetch = function (getCurrentHub) { + if (!supportsNativeFetch()) { + return; + } + + console.log("PATCHING FETCH"); + + // tslint:disable: only-arrow-functions + fill(getGlobalObject(), 'fetch', function (originalFetch) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + // @ts-ignore + var self = getCurrentHub().getIntegration(Tracing); + if (self && self._options.tracingOrigins) { + console.log("blafalseq"); + var url_2 = args[0]; + var options = args[1] = args[1] || {}; + var whiteListed_1 = false; + self._options.tracingOrigins.forEach(function (whiteListUrl) { + if (!whiteListed_1) { + whiteListed_1 = isMatchingPattern(url_2, whiteListUrl); + console.log('a', url_2, whiteListUrl); + } + }); + if (whiteListed_1) { + console.log('aaaaaa', options, whiteListed_1); + if (options.headers) { + + if (Array.isArray(options.headers)) { + options.headers = __spread(options.headers, Object.entries(getCurrentHub().traceHeaders())); + } + else { + options.headers = __assign({}, options.headers, getCurrentHub().traceHeaders()); + } + } + else { + options.headers = getCurrentHub().traceHeaders(); + } + + console.log(options.headers); + } + } + + args[1] = options; + // tslint:disable-next-line: no-unsafe-any + return originalFetch.apply(getGlobalObject(), args); + }; + }); + // tslint:enable: only-arrow-functions + }; + /** + * @inheritDoc + */ + Tracing.id = 'Tracing'; + return Tracing; +}()); + +exports.Tracing = Tracing; + + + __window.Sentry = __window.Sentry || {}; + __window.Sentry.Integrations = __window.Sentry.Integrations || {}; + Object.assign(__window.Sentry.Integrations, exports); + + + + + + + + + + + + +}(window)); +//# sourceMappingURL=tracing.js.map diff --git a/examples/tracing/templates/index.html b/examples/tracing/templates/index.html new file mode 100644 index 0000000..2aa95e7 --- /dev/null +++ b/examples/tracing/templates/index.html @@ -0,0 +1,57 @@ + + + + + + + +

+[index.html markup not recoverable in this import; the template's visible text is the heading "Decode your base64 string as a service (that calls another service)", a "A base64 string" form field, and an "Output:" section.]
diff --git a/examples/tracing/traceviewer.py b/examples/tracing/traceviewer.py
new file mode 100644
index 0000000..9c1435f
--- /dev/null
+++ b/examples/tracing/traceviewer.py
@@ -0,0 +1,61 @@
+import json
+import sys
+
+print("digraph mytrace {")
+print("rankdir=LR")
+
+all_spans = []
+
+for line in sys.stdin:
+    event = json.loads(line)
+    if event.get("type") != "transaction":
+        continue
+
+    trace_ctx = event["contexts"]["trace"]
+    trace_span = dict(trace_ctx)  # fake a span entry from transaction event
+    trace_span["description"] = event["transaction"]
+    trace_span["start_timestamp"] = event["start_timestamp"]
+    trace_span["timestamp"] = event["timestamp"]
+
+    if "parent_span_id" not in trace_ctx:
+        print(
+            '{} [label="trace:{} ({})"];'.format(
+                int(trace_ctx["trace_id"], 16),
+                event["transaction"],
+                trace_ctx["trace_id"],
+            )
+        )
+
+    for span in event["spans"] + [trace_span]:
+        print(
+            '{} [label="span:{} ({})"];'.format(
+                int(span["span_id"], 16), span["description"], span["span_id"]
+            )
+        )
+        if "parent_span_id" in span:
+            print(
+                "{} -> {};".format(
+                    int(span["parent_span_id"], 16), int(span["span_id"], 16)
+                )
+            )
+
+        print(
+            "{} -> {} [style=dotted];".format(
+                int(span["trace_id"], 16), int(span["span_id"], 16)
+            )
+        )
+
+        all_spans.append(span)
+
+
+for s1 in all_spans:
+    for s2 in all_spans:
+        if s1["start_timestamp"] > s2["timestamp"]:
+            print(
+                '{} -> {} [color="#efefef"];'.format(
+                    int(s1["span_id"], 16), int(s2["span_id"], 16)
+                )
+            )
+
+
+print("}")
diff --git a/examples/tracing/tracing.py b/examples/tracing/tracing.py
new file mode 100644
index 0000000..9612d9a
--- /dev/null
+++ b/examples/tracing/tracing.py
@@ -0,0 +1,73 @@
+import json
+import flask
+import os
+import redis
+import rq
+import sentry_sdk
+import time
+import urllib3
+
+from sentry_sdk.integrations.flask import FlaskIntegration
+from sentry_sdk.integrations.rq import RqIntegration
+
+
+app = flask.Flask(__name__)
+redis_conn = redis.Redis()
+http = urllib3.PoolManager()
+queue = rq.Queue(connection=redis_conn)
+
+
+def write_event(event):
+    with open("events", "a") as f:
+        f.write(json.dumps(event))
+        f.write("\n")
+
+
+sentry_sdk.init(
+    integrations=[FlaskIntegration(), RqIntegration()],
+    traces_sample_rate=1.0,
+    traceparent_v2=True,
+    debug=True,
+    transport=write_event,
+)
+
+
+def decode_base64(encoded, redis_key):
+    time.sleep(1)
+    r = http.request("GET", "http://httpbin.org/base64/{}".format(encoded))
+    redis_conn.set(redis_key, r.data)
+
+
+@app.route("/")
+def index():
+    return flask.render_template(
+        "index.html",
+        sentry_dsn=os.environ["SENTRY_DSN"],
+        traceparent=dict(sentry_sdk.Hub.current.iter_trace_propagation_headers()),
+    )
+
+
+@app.route("/compute/")
+def compute(input):
+    redis_key = "sentry-python-tracing-example-result:{}".format(input)
+    redis_conn.delete(redis_key)
+    queue.enqueue(decode_base64, encoded=input, redis_key=redis_key)
+
+    return redis_key
+
+
+@app.route("/wait/")
+def wait(redis_key):
+    result = redis_conn.get(redis_key)
+    if result is None:
+        return "NONE"
+    else:
+        redis_conn.delete(redis_key)
+        return "RESULT: {}".format(result)
+
+
+@app.cli.command("worker")
+def run_worker():
+    print("WORKING")
+    worker = rq.Worker([queue], connection=queue.connection)
+    worker.work()
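One way to exercise the example end to end, sketched under the assumption that the Flask app is serving on 127.0.0.1:5000, Redis is running, and the `flask worker` command above is active (the sample payload decodes to "hello world"):

```python
# Illustrative client, not part of the example itself.
import time
import urllib3

http = urllib3.PoolManager()
key = http.request("GET", "http://127.0.0.1:5000/compute/aGVsbG8gd29ybGQK").data.decode()
while True:
    body = http.request("GET", "http://127.0.0.1:5000/wait/" + key).data.decode()
    if body != "NONE":
        print(body)  # e.g. "RESULT: hello world"
        break
    time.sleep(0.5)
```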
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..7ad5ce7
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,60 @@
+[mypy]
+allow_redefinition = True
+check_untyped_defs = True
+; disallow_any_decorated = True
+; disallow_any_explicit = True
+; disallow_any_expr = True
+disallow_any_generics = True
+; disallow_any_unimported = True
+disallow_incomplete_defs = True
+; disallow_subclassing_any = True
+; disallow_untyped_calls = True
+disallow_untyped_decorators = True
+disallow_untyped_defs = True
+no_implicit_optional = True
+strict_equality = True
+strict_optional = True
+warn_redundant_casts = True
+; warn_return_any = True
+; warn_unused_configs = True
+; warn_unused_ignores = True
+
+
+; Relaxations:
+
+[mypy-sentry_sdk._compat]
+disallow_untyped_defs = False
+
+[mypy-sentry_sdk.scope]
+disallow_untyped_defs = False
+
+[mypy-sentry_sdk.integrations.*]
+disallow_any_generics = False
+disallow_untyped_defs = False
+
+[mypy-sentry_sdk.integrations.aiohttp]
+disallow_any_generics = True
+disallow_untyped_defs = True
+
+[mypy-sentry_sdk.utils]
+disallow_any_generics = False
+disallow_untyped_defs = False
+
+[mypy-django.*]
+ignore_missing_imports = True
+[mypy-pyramid.*]
+ignore_missing_imports = True
+[mypy-psycopg2.*]
+ignore_missing_imports = True
+[mypy-pytest.*]
+ignore_missing_imports = True
+[mypy-aiohttp.*]
+ignore_missing_imports = True
+[mypy-sanic.*]
+ignore_missing_imports = True
+[mypy-tornado.*]
+ignore_missing_imports = True
+[mypy-fakeredis.*]
+ignore_missing_imports = True
+[mypy-rq.*]
+ignore_missing_imports = True
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..ca43883
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings
+addopts = --boxed --tb=short
+markers = tests_internal_exceptions
diff --git a/scripts/aws-cleanup.sh b/scripts/aws-cleanup.sh
new file mode 100644
index 0000000..1219668
--- /dev/null
+++ b/scripts/aws-cleanup.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+# Delete all AWS Lambda functions
+
+export AWS_ACCESS_KEY_ID="$SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"
+export AWS_SECRET_ACCESS_KEY="$SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"
+export AWS_IAM_ROLE="$SENTRY_PYTHON_TEST_AWS_IAM_ROLE"
+
+for func in $(aws lambda list-functions | jq -r .Functions[].FunctionName); do
+    echo "Deleting $func"
+    aws lambda delete-function --function-name $func
+done
diff --git a/scripts/bump-version.sh b/scripts/bump-version.sh
new file mode 100755
index 0000000..d048369
--- /dev/null
+++ b/scripts/bump-version.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+set -eux
+
+SCRIPT_DIR="$( dirname "$0" )"
+cd $SCRIPT_DIR/..
+
+OLD_VERSION="${1}"
+NEW_VERSION="${2}"
+
+echo "Current version: $OLD_VERSION"
+echo "Bumping version: $NEW_VERSION"
+
+function replace() {
+    ! grep "$2" $3
+    perl -i -pe "s/$1/$2/g" $3
+    grep "$2" $3  # verify that replacement was successful
+}
+
+replace "version=\"[0-9.]+\"" "version=\"$NEW_VERSION\"" ./setup.py
+replace "VERSION = \"[0-9.]+\"" "VERSION = \"$NEW_VERSION\"" ./sentry_sdk/consts.py
+replace "release = \"[0-9.]+\"" "release = \"$NEW_VERSION\"" ./docs/conf.py
diff --git a/scripts/download-semaphore.sh b/scripts/download-semaphore.sh
new file mode 100755
index 0000000..0b5e2ce
--- /dev/null
+++ b/scripts/download-semaphore.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+set -e
+
+if { [ "$TRAVIS" == "true" ] || [ "$TF_BUILD" == "True" ]; } && [ -z "$GITHUB_API_TOKEN" ]; then
+    echo "Not running on external pull request"
+    exit 0;
+fi
+
+target=semaphore
+
+# Download the latest semaphore release for Travis
+
+output="$(
+    curl -s \
+    https://api.github.com/repos/getsentry/semaphore/releases/latest?access_token=$GITHUB_API_TOKEN
+)"
+
+echo "$output"
+
+output="$(echo "$output" \
+    | grep "$(uname -s)" \
+    | grep -v "\.zip" \
+    | grep "download" \
+    | cut -d : -f 2,3 \
+    | tr -d , \
+    | tr -d \")"
+
+echo "$output"
+echo "$output" | wget -i - -O $target
+[ -s $target ]
+chmod +x $target
diff --git a/scripts/runtox.sh b/scripts/runtox.sh
new file mode 100755
index 0000000..38a5345
--- /dev/null
+++ b/scripts/runtox.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -ex
+
+if [ -n "$TOXPATH" ]; then
+    true
+elif which tox &> /dev/null; then
+    TOXPATH=tox
+else
+    TOXPATH=./.venv/bin/tox
+fi
+
+# Usage: sh scripts/runtox.sh py3.7 <pytest-args>
+# Runs all environments with substring py3.7 and the given arguments for pytest
+
+if [ -n "$1" ]; then
+    searchstring="$1"
+elif [ -n "$TRAVIS_PYTHON_VERSION" ]; then
+    searchstring="$(echo py$TRAVIS_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
+elif [ -n "$AZURE_PYTHON_VERSION" ]; then
+    searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
+fi
+
+exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr '\n' ',') -- "${@:2}"
diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py
new file mode 100644
index 0000000..b211a6c
--- /dev/null
+++ b/sentry_sdk/__init__.py
@@ -0,0 +1,25 @@
+from sentry_sdk.hub import Hub, init
+from sentry_sdk.scope import Scope
+from sentry_sdk.transport import Transport, HttpTransport
+from sentry_sdk.client import Client
+
+from sentry_sdk.api import *  # noqa
+from sentry_sdk.api import __all__ as api_all
+
+from sentry_sdk.consts import VERSION  # noqa
+
+__all__ = api_all + [  # noqa
+    "Hub",
+    "Scope",
+    "Client",
+    "Transport",
+    "HttpTransport",
+    "init",
+    "integrations",
+]
+
+# Initialize the debug support after everything is loaded
+from sentry_sdk.debug import init_debug_support
+
+init_debug_support()
+del init_debug_support
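For orientation, the API re-exported here is typically used along these lines (a sketch; a real DSN would normally be passed to `init()` or set via `SENTRY_DSN`):

```python
import sentry_sdk

# Without a DSN the SDK runs in disabled mode, which is enough for a smoke test.
sentry_sdk.init(debug=True)
sentry_sdk.add_breadcrumb(category="example", message="about to divide by zero")
try:
    1 / 0
except ZeroDivisionError:
    sentry_sdk.capture_exception()
```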
diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
new file mode 100644
index 0000000..c94ef6d
--- /dev/null
+++ b/sentry_sdk/_compat.py
@@ -0,0 +1,87 @@
+import sys
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Optional
+    from typing import Tuple
+    from typing import Any
+    from typing import Type
+
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+    import urlparse  # noqa
+
+    text_type = unicode  # noqa
+    import Queue as queue  # noqa
+
+    string_types = (str, text_type)
+    number_types = (int, long, float)  # noqa
+    int_types = (int, long)  # noqa
+    iteritems = lambda x: x.iteritems()  # noqa: B301
+
+    def implements_str(cls):
+        cls.__unicode__ = cls.__str__
+        cls.__str__ = lambda x: unicode(x).encode("utf-8")  # noqa
+        return cls
+
+    exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
+
+
+else:
+    import urllib.parse as urlparse  # noqa
+    import queue  # noqa
+
+    text_type = str
+    string_types = (text_type,)  # type: Tuple[type]
+    number_types = (int, float)  # type: Tuple[type, type]
+    int_types = (int,)  # noqa
+    iteritems = lambda x: x.items()
+
+    def _identity(x):
+        return x
+
+    def implements_str(x):
+        return x
+
+    def reraise(tp, value, tb=None):
+        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> None
+        assert value is not None
+        if value.__traceback__ is not tb:
+            raise value.with_traceback(tb)
+        raise value
+
+
+def with_metaclass(meta, *bases):
+    class metaclass(type):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+
+    return type.__new__(metaclass, "temporary_class", (), {})
+
+
+def check_thread_support():
+    # type: () -> None
+    try:
+        from uwsgi import opt  # type: ignore
+    except ImportError:
+        return
+
+    # When `threads` is passed in as a uwsgi option,
+    # `enable-threads` is implied on.
+    if "threads" in opt:
+        return
+
+    if str(opt.get("enable-threads", "0")).lower() in ("false", "off", "no", "0"):
+        from warnings import warn
+
+        warn(
+            Warning(
+                "We detected the use of uwsgi with disabled threads.  "
+                "This will cause issues with the transport you are "
+                "trying to use.  Please enable threading for uwsgi.  "
+                '(Enable the "enable-threads" flag).'
+            )
+        )
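A small illustrative use of the helpers above (not taken from the SDK itself): `reraise` re-raises an exception with its original traceback on both Python 2 and 3.

```python
import sys

from sentry_sdk._compat import reraise


def log_and_reraise(fn):
    # Run fn(), log any exception, then propagate it unchanged.
    try:
        return fn()
    except Exception:
        exc_info = sys.exc_info()
        print("captured: %r" % (exc_info[1],))
        reraise(*exc_info)
```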
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
new file mode 100644
index 0000000..99654e9
--- /dev/null
+++ b/sentry_sdk/_types.py
@@ -0,0 +1,28 @@
+try:
+    from typing import TYPE_CHECKING as MYPY
+except ImportError:
+    MYPY = False
+
+
+if MYPY:
+    from types import TracebackType
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from typing import Tuple
+    from typing import Type
+
+    ExcInfo = Tuple[
+        Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
+    ]
+
+    Event = Dict[str, Any]
+    Hint = Dict[str, Any]
+
+    Breadcrumb = Dict[str, Any]
+    BreadcrumbHint = Dict[str, Any]
+
+    EventProcessor = Callable[[Event, Hint], Optional[Event]]
+    ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
+    BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
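As a concrete, illustrative example, a `before_send` hook matching the `EventProcessor` alias is just a function over plain dicts:

```python
def scrub_server_name(event, hint):
    # type: (dict, dict) -> dict
    # Drop a field before the event leaves the process.
    event.pop("server_name", None)
    return event

# Passed to the client as: sentry_sdk.init(before_send=scrub_server_name, ...)
```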
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
new file mode 100644
index 0000000..873ea96
--- /dev/null
+++ b/sentry_sdk/api.py
@@ -0,0 +1,195 @@
+import inspect
+from contextlib import contextmanager
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.scope import Scope
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Optional
+    from typing import overload
+    from typing import Callable
+    from typing import TypeVar
+    from typing import ContextManager
+
+    from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint
+    from sentry_sdk.tracing import Span
+
+    T = TypeVar("T")
+    F = TypeVar("F", bound=Callable[..., Any])
+else:
+
+    def overload(x):
+        # type: (T) -> T
+        return x
+
+
+__all__ = [
+    "capture_event",
+    "capture_message",
+    "capture_exception",
+    "add_breadcrumb",
+    "configure_scope",
+    "push_scope",
+    "flush",
+    "last_event_id",
+    "start_span",
+]
+
+
+def hubmethod(f):
+    # type: (F) -> F
+    f.__doc__ = "%s\n\n%s" % (
+        "Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__,
+        inspect.getdoc(getattr(Hub, f.__name__)),
+    )
+    return f
+
+
+@hubmethod
+def capture_event(
+    event,  # type: Event
+    hint=None,  # type: Optional[Hint]
+):
+    # type: (...) -> Optional[str]
+    hub = Hub.current
+    if hub is not None:
+        return hub.capture_event(event, hint)
+    return None
+
+
+@hubmethod
+def capture_message(
+    message,  # type: str
+    level=None,  # type: Optional[str]
+):
+    # type: (...) -> Optional[str]
+    hub = Hub.current
+    if hub is not None:
+        return hub.capture_message(message, level)
+    return None
+
+
+@hubmethod
+def capture_exception(
+    error=None  # type: Optional[BaseException]
+):
+    # type: (...) -> Optional[str]
+    hub = Hub.current
+    if hub is not None:
+        return hub.capture_exception(error)
+    return None
+
+
+@hubmethod
+def add_breadcrumb(
+    crumb=None,  # type: Optional[Breadcrumb]
+    hint=None,  # type: Optional[BreadcrumbHint]
+    **kwargs  # type: Any
+):
+    # type: (...) -> None
+    hub = Hub.current
+    if hub is not None:
+        return hub.add_breadcrumb(crumb, hint, **kwargs)
+
+
+@overload  # noqa
+def configure_scope():
+    # type: () -> ContextManager[Scope]
+    pass
+
+
+@overload  # noqa
+def configure_scope(
+    callback  # type: Callable[[Scope], None]
+):
+    # type: (...) -> None
+    pass
+
+
+@hubmethod  # noqa
+def configure_scope(
+    callback=None  # type: Optional[Callable[[Scope], None]]
+):
+    # type: (...) -> Optional[ContextManager[Scope]]
+    hub = Hub.current
+    if hub is not None:
+        return hub.configure_scope(callback)
+    elif callback is None:
+
+        @contextmanager
+        def inner():
+            yield Scope()
+
+        return inner()
+    else:
+        # returned if user provided callback
+        return None
+
+
+@overload  # noqa
+def push_scope():
+    # type: () -> ContextManager[Scope]
+    pass
+
+
+@overload  # noqa
+def push_scope(
+    callback  # type: Callable[[Scope], None]
+):
+    # type: (...) -> None
+    pass
+
+
+@hubmethod  # noqa
+def push_scope(
+    callback=None  # type: Optional[Callable[[Scope], None]]
+):
+    # type: (...) -> Optional[ContextManager[Scope]]
+    hub = Hub.current
+    if hub is not None:
+        return hub.push_scope(callback)
+    elif callback is None:
+
+        @contextmanager
+        def inner():
+            yield Scope()
+
+        return inner()
+    else:
+        # returned if user provided callback
+        return None
+
+
+@hubmethod
+def flush(
+    timeout=None,  # type: Optional[float]
+    callback=None,  # type: Optional[Callable[[int, float], None]]
+):
+    # type: (...) -> None
+    hub = Hub.current
+    if hub is not None:
+        return hub.flush(timeout=timeout, callback=callback)
+
+
+@hubmethod
+def last_event_id():
+    # type: () -> Optional[str]
+    hub = Hub.current
+    if hub is not None:
+        return hub.last_event_id()
+    return None
+
+
+@hubmethod
+def start_span(
+    span=None,  # type: Optional[Span]
+    **kwargs  # type: Any
+):
+    # type: (...) -> Span
+
+    # TODO: All other functions in this module check for
+    # `Hub.current is None`. That actually should never happen?
+    return Hub.current.start_span(span=span, **kwargs)
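Typical usage of these hub-forwarding helpers, sketched for illustration (assumes `Scope.set_tag` and the span context-manager protocol provided elsewhere in this SDK):

```python
import sentry_sdk

with sentry_sdk.push_scope() as scope:
    scope.set_tag("section", "checkout")
    sentry_sdk.capture_message("payment failed", level="warning")

with sentry_sdk.start_span(op="http", description="GET /health"):
    pass  # work measured by the span
```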
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
new file mode 100644
index 0000000..b46cd38
--- /dev/null
+++ b/sentry_sdk/client.py
@@ -0,0 +1,333 @@
+import os
+import uuid
+import random
+from datetime import datetime
+import socket
+
+from sentry_sdk._compat import string_types, text_type, iteritems
+from sentry_sdk.utils import (
+    handle_in_app,
+    get_type_name,
+    capture_internal_exceptions,
+    current_stacktrace,
+    logger,
+)
+from sentry_sdk.serializer import Serializer
+from sentry_sdk.transport import make_transport
+from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
+from sentry_sdk.integrations import setup_integrations
+from sentry_sdk.utils import ContextVar
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+
+    from sentry_sdk.scope import Scope
+    from sentry_sdk._types import Event, Hint
+
+
+_client_init_debug = ContextVar("client_init_debug")
+_client_in_capture_event = ContextVar("client_in_capture_event")
+
+
+def _get_options(*args, **kwargs):
+    # type: (*Optional[str], **Any) -> Dict[str, Any]
+    if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
+        dsn = args[0]  # type: Optional[str]
+        args = args[1:]
+    else:
+        dsn = None
+
+    rv = dict(DEFAULT_OPTIONS)
+    options = dict(*args, **kwargs)  # type: ignore
+    if dsn is not None and options.get("dsn") is None:
+        options["dsn"] = dsn  # type: ignore
+
+    for key, value in iteritems(options):
+        if key not in rv:
+            raise TypeError("Unknown option %r" % (key,))
+        rv[key] = value
+
+    if rv["dsn"] is None:
+        rv["dsn"] = os.environ.get("SENTRY_DSN")
+
+    if rv["release"] is None:
+        rv["release"] = os.environ.get("SENTRY_RELEASE")
+
+    if rv["environment"] is None:
+        rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT")
+
+    if rv["server_name"] is None and hasattr(socket, "gethostname"):
+        rv["server_name"] = socket.gethostname()
+
+    return rv  # type: ignore
+
+
+class _Client(object):
+    """The client is internally responsible for capturing the events and
+    forwarding them to sentry through the configured transport.  It takes
+    the client options as keyword arguments and optionally the DSN as first
+    argument.
+    """
+
+    def __init__(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
+        self.options = get_options(*args, **kwargs)  # type: Dict[str, Any]
+        self._init_impl()
+
+    def __getstate__(self):
+        # type: () -> Any
+        return {"options": self.options}
+
+    def __setstate__(self, state):
+        # type: (Any) -> None
+        self.options = state["options"]
+        self._init_impl()
+
+    def _init_impl(self):
+        # type: () -> None
+        old_debug = _client_init_debug.get(False)
+        try:
+            _client_init_debug.set(self.options["debug"])
+            self.transport = make_transport(self.options)
+
+            request_bodies = ("always", "never", "small", "medium")
+            if self.options["request_bodies"] not in request_bodies:
+                raise ValueError(
+                    "Invalid value for request_bodies. Must be one of {}".format(
+                        request_bodies
+                    )
+                )
+
+            self.integrations = setup_integrations(
+                self.options["integrations"],
+                with_defaults=self.options["default_integrations"],
+            )
+        finally:
+            _client_init_debug.set(old_debug)
+
+    @property
+    def dsn(self):
+        # type: () -> Optional[str]
+        """Returns the configured DSN as string."""
+        return self.options["dsn"]
+
+    def _prepare_event(
+        self,
+        event,  # type: Event
+        hint,  # type: Optional[Hint]
+        scope,  # type: Optional[Scope]
+    ):
+        # type: (...) -> Optional[Event]
+        if event.get("timestamp") is None:
+            event["timestamp"] = datetime.utcnow()
+
+        hint = dict(hint or ())  # type: Hint
+
+        if scope is not None:
+            event_ = scope.apply_to_event(event, hint)
+            if event_ is None:
+                return None
+            event = event_
+
+        if (
+            self.options["attach_stacktrace"]
+            and "exception" not in event
+            and "stacktrace" not in event
+            and "threads" not in event
+        ):
+            with capture_internal_exceptions():
+                event["threads"] = {
+                    "values": [
+                        {
+                            "stacktrace": current_stacktrace(
+                                self.options["with_locals"]
+                            ),
+                            "crashed": False,
+                            "current": True,
+                        }
+                    ]
+                }
+
+        for key in "release", "environment", "server_name", "dist":
+            if event.get(key) is None and self.options[key] is not None:  # type: ignore
+                event[key] = text_type(self.options[key]).strip()  # type: ignore
+        if event.get("sdk") is None:
+            sdk_info = dict(SDK_INFO)
+            sdk_info["integrations"] = sorted(self.integrations.keys())
+            event["sdk"] = sdk_info
+
+        if event.get("platform") is None:
+            event["platform"] = "python"
+
+        event = handle_in_app(
+            event, self.options["in_app_exclude"], self.options["in_app_include"]
+        )
+
+        # Postprocess the event here so that annotated types do
+        # generally not surface in before_send
+        if event is not None:
+            event = Serializer().serialize_event(event)
+
+        before_send = self.options["before_send"]
+        if before_send is not None:
+            new_event = None
+            with capture_internal_exceptions():
+                new_event = before_send(event, hint or {})
+            if new_event is None:
+                logger.info("before send dropped event (%s)", event)
+            event = new_event  # type: ignore
+
+        return event
+
+    def _is_ignored_error(self, event, hint):
+        # type: (Event, Hint) -> bool
+        exc_info = hint.get("exc_info")
+        if exc_info is None:
+            return False
+
+        type_name = get_type_name(exc_info[0])
+        full_name = "%s.%s" % (exc_info[0].__module__, type_name)
+
+        for errcls in self.options["ignore_errors"]:
+            # String types are matched against the type name in the
+            # exception only
+            if isinstance(errcls, string_types):
+                if errcls == full_name or errcls == type_name:
+                    return True
+            else:
+                if issubclass(exc_info[0], errcls):  # type: ignore
+                    return True
+
+        return False
+
+    def _should_capture(
+        self,
+        event,  # type: Event
+        hint,  # type: Hint
+        scope=None,  # type: Optional[Scope]
+    ):
+        # type: (...) -> bool
+        if scope is not None and not scope._should_capture:
+            return False
+
+        if (
+            self.options["sample_rate"] < 1.0
+            and random.random() >= self.options["sample_rate"]
+        ):
+            return False
+
+        if self._is_ignored_error(event, hint):
+            return False
+
+        return True
+
+    def capture_event(
+        self,
+        event,  # type: Event
+        hint=None,  # type: Optional[Hint]
+        scope=None,  # type: Optional[Scope]
+    ):
+        # type: (...) -> Optional[str]
+        """Captures an event.
+
+        :param event: A ready-made event that can be directly sent to Sentry.
+
+        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or an HTTP request object.
+
+        :returns: An event ID. May be `None` if there is no DSN set or if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
+        """
+        is_recursive = _client_in_capture_event.get(False)
+        if is_recursive:
+            return None
+
+        _client_in_capture_event.set(True)
+
+        try:
+            if self.transport is None:
+                return None
+            if hint is None:
+                hint = {}
+            event_id = event.get("event_id")
+            if event_id is None:
+                event["event_id"] = event_id = uuid.uuid4().hex
+            if not self._should_capture(event, hint, scope):
+                return None
+            event_opt = self._prepare_event(event, hint, scope)
+            if event_opt is None:
+                return None
+            self.transport.capture_event(event_opt)
+            return event_id
+        finally:
+            _client_in_capture_event.set(False)
+
+    def close(
+        self,
+        timeout=None,  # type: Optional[float]
+        callback=None,  # type: Optional[Callable[[int, float], None]]
+    ):
+        # type: (...) -> None
+        """
+        Close the client and shut down the transport. Arguments have the same
+        semantics as :py:meth:`Client.flush`.
+        """
+        if self.transport is not None:
+            self.flush(timeout=timeout, callback=callback)
+            self.transport.kill()
+            self.transport = None
+
+    def flush(
+        self,
+        timeout=None,  # type: Optional[float]
+        callback=None,  # type: Optional[Callable[[int, float], None]]
+    ):
+        # type: (...) -> None
+        """
+        Wait for the current events to be sent.
+
+        :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used.
+
+        :param callback: Is invoked with the number of pending events and the configured timeout.
+        """
+        if self.transport is not None:
+            if timeout is None:
+                timeout = self.options["shutdown_timeout"]
+            self.transport.flush(timeout=timeout, callback=callback)
+
+    def __enter__(self):
+        # type: () -> _Client
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):
+        # type: (Any, Any, Any) -> None
+        self.close()
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    # Make mypy, PyCharm and other static analyzers think `get_options` is a
+    # type to have nicer autocompletion for params.
+    #
+    # Use `ClientConstructor` to define the argument types of `init` and
+    # `Dict[str, Any]` to tell static analyzers about the return type.
+
+    class get_options(ClientConstructor, Dict[str, Any]):
+        pass
+
+    class Client(ClientConstructor, _Client):
+        pass
+
+
+else:
+    # Alias `get_options` for actual usage. Go through the lambda indirection
+    # to throw PyCharm off of the weakly typed signature (it would otherwise
+    # discover both the weakly typed signature of `_init` and our faked `init`
+    # type).
+
+    get_options = (lambda: _get_options)()
+    Client = (lambda: _Client)()
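A sketch of driving the client directly, an assumed pattern; most applications go through `sentry_sdk.init()` instead:

```python
from sentry_sdk import Client, Hub

client = Client(dsn=None, debug=True)  # no DSN: the transport stays disabled
hub = Hub(client)
event_id = hub.capture_message("hello from a dedicated client")
print(event_id)  # None here, because no transport is configured
```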
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
new file mode 100644
index 0000000..5da884a
--- /dev/null
+++ b/sentry_sdk/consts.py
@@ -0,0 +1,80 @@
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Optional
+    from typing import Callable
+    from typing import Union
+    from typing import List
+    from typing import Type
+    from typing import Dict
+    from typing import Any
+    from typing import Sequence
+
+    from sentry_sdk.transport import Transport
+    from sentry_sdk.integrations import Integration
+
+    from sentry_sdk._types import Event, EventProcessor, BreadcrumbProcessor
+
+
+# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
+# take these arguments (even though they take opaque **kwargs)
+class ClientConstructor(object):
+    def __init__(
+        self,
+        dsn=None,  # type: Optional[str]
+        with_locals=True,  # type: bool
+        max_breadcrumbs=100,  # type: int
+        release=None,  # type: Optional[str]
+        environment=None,  # type: Optional[str]
+        server_name=None,  # type: Optional[str]
+        shutdown_timeout=2,  # type: int
+        integrations=[],  # type: Sequence[Integration]
+        in_app_include=[],  # type: List[str]
+        in_app_exclude=[],  # type: List[str]
+        default_integrations=True,  # type: bool
+        dist=None,  # type: Optional[str]
+        transport=None,  # type: Optional[Union[Transport, Type[Transport], Callable[[Event], None]]]
+        sample_rate=1.0,  # type: float
+        send_default_pii=False,  # type: bool
+        http_proxy=None,  # type: Optional[str]
+        https_proxy=None,  # type: Optional[str]
+        ignore_errors=[],  # type: List[Union[type, str]]
+        request_bodies="medium",  # type: str
+        before_send=None,  # type: Optional[EventProcessor]
+        before_breadcrumb=None,  # type: Optional[BreadcrumbProcessor]
+        debug=False,  # type: bool
+        attach_stacktrace=False,  # type: bool
+        ca_certs=None,  # type: Optional[str]
+        propagate_traces=True,  # type: bool
+        # DO NOT ENABLE THIS RIGHT NOW UNLESS YOU WANT TO EXCEED YOUR EVENT QUOTA IMMEDIATELY
+        traces_sample_rate=0.0,  # type: float
+        traceparent_v2=False,  # type: bool
+        _experiments={},  # type: Dict[str, Any]
+    ):
+        # type: (...) -> None
+        pass
+
+
+def _get_default_options():
+    # type: () -> Dict[str, Any]
+    import inspect
+
+    if hasattr(inspect, "getfullargspec"):
+        getargspec = inspect.getfullargspec  # type: ignore
+    else:
+        getargspec = inspect.getargspec  # type: ignore
+
+    a = getargspec(ClientConstructor.__init__)
+    return dict(zip(a.args[-len(a.defaults) :], a.defaults))
+
+
+DEFAULT_OPTIONS = _get_default_options()
+del _get_default_options
+
+
+VERSION = "0.12.2"
+SDK_INFO = {
+    "name": "sentry.python",
+    "version": VERSION,
+    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
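A small illustration of how the constants above behave; the printed values mirror the defaults declared in `ClientConstructor.__init__`:

```python
from sentry_sdk.consts import DEFAULT_OPTIONS, VERSION

# DEFAULT_OPTIONS is derived from the fake ClientConstructor signature above,
# so it lists every keyword accepted by init()/Client() with its default value.
print(VERSION)                            # "0.12.2"
print(DEFAULT_OPTIONS["sample_rate"])     # 1.0
print(DEFAULT_OPTIONS["request_bodies"])  # "medium"
```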
diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py
new file mode 100644
index 0000000..fe8ae50
--- /dev/null
+++ b/sentry_sdk/debug.py
@@ -0,0 +1,44 @@
+import sys
+import logging
+
+from sentry_sdk import utils
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import logger
+from sentry_sdk.client import _client_init_debug
+from logging import LogRecord
+
+
+class _HubBasedClientFilter(logging.Filter):
+    def filter(self, record):
+        # type: (LogRecord) -> bool
+        if _client_init_debug.get(False):
+            return True
+        hub = Hub.current
+        if hub is not None and hub.client is not None:
+            return hub.client.options["debug"]
+        return False
+
+
+def init_debug_support():
+    # type: () -> None
+    if not logger.handlers:
+        configure_logger()
+    configure_debug_hub()
+
+
+def configure_logger():
+    # type: () -> None
+    _handler = logging.StreamHandler(sys.stderr)
+    _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
+    logger.addHandler(_handler)
+    logger.setLevel(logging.DEBUG)
+    logger.addFilter(_HubBasedClientFilter())
+
+
+def configure_debug_hub():
+    # type: () -> None
+    def _get_debug_hub():
+        # type: () -> Hub
+        return Hub.current
+
+    utils._get_debug_hub = _get_debug_hub
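A quick sketch of switching this debug machinery on; it assumes the SDK wires up `init_debug_support()` during client initialization (not shown in this hunk) and that `SENTRY_DSN` is not set, so nothing is sent anywhere:

```python
import sentry_sdk

# With debug=True, SDK-internal log records pass the _HubBasedClientFilter
# defined above and are written to stderr with the " [sentry]" prefix.
sentry_sdk.init(debug=True)
sentry_sdk.capture_message("this triggers some debug output")
```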
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
new file mode 100644
index 0000000..f804747
--- /dev/null
+++ b/sentry_sdk/hub.py
@@ -0,0 +1,589 @@
+import copy
+import random
+import sys
+import weakref
+
+from datetime import datetime
+from contextlib import contextmanager
+from warnings import warn
+
+from sentry_sdk._compat import with_metaclass
+from sentry_sdk.scope import Scope
+from sentry_sdk.client import Client
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import (
+    exc_info_from_error,
+    event_from_exception,
+    logger,
+    ContextVar,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Union
+    from typing import Any
+    from typing import Optional
+    from typing import Tuple
+    from typing import List
+    from typing import Callable
+    from typing import Generator
+    from typing import Type
+    from typing import TypeVar
+    from typing import overload
+    from typing import ContextManager
+
+    from sentry_sdk.integrations import Integration
+    from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo
+    from sentry_sdk.consts import ClientConstructor
+
+    T = TypeVar("T")
+
+else:
+
+    def overload(x):
+        # type: (T) -> T
+        return x
+
+
+_local = ContextVar("sentry_current_hub")  # type: ignore
+_initial_client = None  # type: Optional[weakref.ReferenceType[Client]]
+
+
+def _should_send_default_pii():
+    # type: () -> bool
+    client = Hub.current.client
+    if not client:
+        return False
+    return client.options["send_default_pii"]
+
+
+class _InitGuard(object):
+    def __init__(self, client):
+        # type: (Client) -> None
+        self._client = client
+
+    def __enter__(self):
+        # type: () -> _InitGuard
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):
+        # type: (Any, Any, Any) -> None
+        c = self._client
+        if c is not None:
+            c.close()
+
+
+def _init(*args, **kwargs):
+    # type: (*Optional[str], **Any) -> ContextManager[Any]
+    """Initializes the SDK and optionally integrations.
+
+    This takes the same arguments as the client constructor.
+    """
+    global _initial_client
+    client = Client(*args, **kwargs)  # type: ignore
+    Hub.current.bind_client(client)
+    rv = _InitGuard(client)
+    if client is not None:
+        _initial_client = weakref.ref(client)
+    return rv
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    # Make mypy, PyCharm and other static analyzers think `init` is a type to
+    # have nicer autocompletion for params.
+    #
+    # Use `ClientConstructor` to define the argument types of `init` and
+    # `ContextManager[Any]` to tell static analyzers about the return type.
+
+    class init(ClientConstructor, ContextManager[Any]):
+        pass
+
+
+else:
+    # Alias `init` for actual usage. Go through the lambda indirection to throw
+    # PyCharm off of the weakly typed signature (it would otherwise discover
+    # both the weakly typed signature of `_init` and our faked `init` type).
+
+    init = (lambda: _init)()
+
+
+class HubMeta(type):
+    @property
+    def current(self):
+        # type: () -> Hub
+        """Returns the current instance of the hub."""
+        rv = _local.get(None)
+        if rv is None:
+            rv = Hub(GLOBAL_HUB)
+            _local.set(rv)
+        return rv
+
+    @property
+    def main(self):
+        # type: () -> Hub
+        """Returns the main instance of the hub."""
+        return GLOBAL_HUB
+
+
+class _ScopeManager(object):
+    def __init__(self, hub):
+        # type: (Hub) -> None
+        self._hub = hub
+        self._original_len = len(hub._stack)
+        self._layer = hub._stack[-1]
+
+    def __enter__(self):
+        # type: () -> Scope
+        scope = self._layer[1]
+        assert scope is not None
+        return scope
+
+    def __exit__(self, exc_type, exc_value, tb):
+        # type: (Any, Any, Any) -> None
+        current_len = len(self._hub._stack)
+        if current_len < self._original_len:
+            logger.error(
+                "Scope popped too soon. Popped %s scopes too many.",
+                self._original_len - current_len,
+            )
+            return
+        elif current_len > self._original_len:
+            logger.warning(
+                "Leaked %s scopes: %s",
+                current_len - self._original_len,
+                self._hub._stack[self._original_len :],
+            )
+
+        layer = self._hub._stack[self._original_len - 1]
+        del self._hub._stack[self._original_len - 1 :]
+
+        if layer[1] != self._layer[1]:
+            logger.error(
+                "Wrong scope found. Meant to pop %s, but popped %s.",
+                self._layer[1],
+                layer[1],
+            )
+        elif layer[0] != self._layer[0]:
+            warning = (
+                "init() called inside of pushed scope. This might be entirely "
+                "legitimate but usually occurs when initializing the SDK inside "
+                "a request handler or task/job function. Try to initialize the "
+                "SDK as early as possible instead."
+            )
+            logger.warning(warning)
+
+
+class Hub(with_metaclass(HubMeta)):  # type: ignore
+    """The hub wraps the concurrency management of the SDK.  Each thread has
+    its own hub but the hub might transfer with the flow of execution if
+    context vars are available.
+
+    If the hub is used with a with statement it's temporarily activated.
+    """
+
+    _stack = None  # type: List[Tuple[Optional[Client], Scope]]
+
+    # Mypy doesn't pick up on the metaclass.
+
+    if MYPY:
+        current = None  # type: Hub
+        main = None  # type: Hub
+
+    def __init__(
+        self,
+        client_or_hub=None,  # type: Optional[Union[Hub, Client]]
+        scope=None,  # type: Optional[Any]
+    ):
+        # type: (...) -> None
+        if isinstance(client_or_hub, Hub):
+            hub = client_or_hub
+            client, other_scope = hub._stack[-1]
+            if scope is None:
+                scope = copy.copy(other_scope)
+        else:
+            client = client_or_hub
+        if scope is None:
+            scope = Scope()
+
+        self._stack = [(client, scope)]
+        self._last_event_id = None  # type: Optional[str]
+        self._old_hubs = []  # type: List[Hub]
+
+    def __enter__(self):
+        # type: () -> Hub
+        self._old_hubs.append(Hub.current)
+        _local.set(self)
+        return self
+
+    def __exit__(
+        self,
+        exc_type,  # type: Optional[type]
+        exc_value,  # type: Optional[BaseException]
+        tb,  # type: Optional[Any]
+    ):
+        # type: (...) -> None
+        old = self._old_hubs.pop()
+        _local.set(old)
+
+    def run(
+        self, callback  # type: Callable[[], T]
+    ):
+        # type: (...) -> T
+        """Runs a callback in the context of the hub.  Alternatively the
+        with statement can be used on the hub directly.
+        """
+        with self:
+            return callback()
+
+    def get_integration(
+        self, name_or_class  # type: Union[str, Type[Integration]]
+    ):
+        # type: (...) -> Any
+        """Returns the integration for this hub by name or class.  If there
+        is no client bound or the client does not have that integration
+        then `None` is returned.
+
+        If the return value is not `None` the hub is guaranteed to have a
+        client attached.
+        """
+        if isinstance(name_or_class, str):
+            integration_name = name_or_class
+        elif name_or_class.identifier is not None:
+            integration_name = name_or_class.identifier
+        else:
+            raise ValueError("Integration has no name")
+
+        client = self._stack[-1][0]
+        if client is not None:
+            rv = client.integrations.get(integration_name)
+            if rv is not None:
+                return rv
+
+        if _initial_client is not None:
+            initial_client = _initial_client()
+        else:
+            initial_client = None
+
+        if (
+            initial_client is not None
+            and initial_client is not client
+            and initial_client.integrations.get(integration_name) is not None
+        ):
+            warning = (
+                "Integration %r attempted to run but it was only "
+                "enabled on init() but not the client that "
+                "was bound to the current flow.  Earlier versions of "
+                "the SDK would consider these integrations enabled but "
+                "this is no longer the case." % (name_or_class,)
+            )
+            warn(Warning(warning), stacklevel=3)
+            logger.warning(warning)
+
+    @property
+    def client(self):
+        # type: () -> Optional[Client]
+        """Returns the current client on the hub."""
+        return self._stack[-1][0]
+
+    def last_event_id(self):
+        # type: () -> Optional[str]
+        """Returns the last event ID."""
+        return self._last_event_id
+
+    def bind_client(
+        self, new  # type: Optional[Client]
+    ):
+        # type: (...) -> None
+        """Binds a new client to the hub."""
+        top = self._stack[-1]
+        self._stack[-1] = (new, top[1])
+
+    def capture_event(
+        self,
+        event,  # type: Event
+        hint=None,  # type: Optional[Hint]
+    ):
+        # type: (...) -> Optional[str]
+        """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`.
+        """
+        client, scope = self._stack[-1]
+        if client is not None:
+            rv = client.capture_event(event, hint, scope)
+            if rv is not None:
+                self._last_event_id = rv
+            return rv
+        return None
+
+    def capture_message(
+        self,
+        message,  # type: str
+        level=None,  # type: Optional[str]
+    ):
+        # type: (...) -> Optional[str]
+        """Captures a message.  The message is just a string.  If no level
+        is provided the default level is `info`.
+
+        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+        """
+        if self.client is None:
+            return None
+        if level is None:
+            level = "info"
+        return self.capture_event({"message": message, "level": level})
+
+    def capture_exception(
+        self, error=None  # type: Optional[Union[BaseException, ExcInfo]]
+    ):
+        # type: (...) -> Optional[str]
+        """Captures an exception.
+
+        :param error: An exception to catch. If `None`, `sys.exc_info()` will be used.
+
+        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+        """
+        client = self.client
+        if client is None:
+            return None
+        if error is None:
+            exc_info = sys.exc_info()
+        else:
+            exc_info = exc_info_from_error(error)
+
+        event, hint = event_from_exception(exc_info, client_options=client.options)
+        try:
+            return self.capture_event(event, hint=hint)
+        except Exception:
+            self._capture_internal_exception(sys.exc_info())
+
+        return None
+
+    def _capture_internal_exception(
+        self, exc_info  # type: Any
+    ):
+        # type: (...) -> Any
+        """
+        Capture an exception that is likely caused by a bug in the SDK
+        itself.
+
+        These exceptions do not end up in Sentry and are just logged instead.
+        """
+        logger.error("Internal error in sentry_sdk", exc_info=exc_info)  # type: ignore
+
+    def add_breadcrumb(
+        self,
+        crumb=None,  # type: Optional[Breadcrumb]
+        hint=None,  # type: Optional[BreadcrumbHint]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """
+        Adds a breadcrumb.
+
+        :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.
+
+        :param hint: An optional value that can be used by `before_breadcrumb`
+            to customize the breadcrumbs that are emitted.
+        """
+        client, scope = self._stack[-1]
+        if client is None:
+            logger.info("Dropped breadcrumb because no client bound")
+            return
+
+        crumb = dict(crumb or ())  # type: Breadcrumb
+        crumb.update(kwargs)
+        if not crumb:
+            return
+
+        hint = dict(hint or ())  # type: Hint
+
+        if crumb.get("timestamp") is None:
+            crumb["timestamp"] = datetime.utcnow()
+        if crumb.get("type") is None:
+            crumb["type"] = "default"
+
+        if client.options["before_breadcrumb"] is not None:
+            new_crumb = client.options["before_breadcrumb"](crumb, hint)
+        else:
+            new_crumb = crumb
+
+        if new_crumb is not None:
+            scope._breadcrumbs.append(new_crumb)
+        else:
+            logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)
+
+        max_breadcrumbs = client.options["max_breadcrumbs"]  # type: int
+        while len(scope._breadcrumbs) > max_breadcrumbs:
+            scope._breadcrumbs.popleft()
+
+    def start_span(
+        self,
+        span=None,  # type: Optional[Span]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Span
+        """
+        Create a new span whose parent span is the currently active
+        span, if any. The return value is the span object that can
+        be used as a context manager to start and stop timing.
+
+        Note that you will not see any span that is not contained
+        within a transaction. Create a transaction with
+        ``start_span(transaction="my transaction")`` if an
+        integration doesn't already do this for you.
+        """
+
+        client, scope = self._stack[-1]
+
+        kwargs.setdefault("hub", self)
+
+        if span is None:
+            if scope.span is not None:
+                span = scope.span.new_span(**kwargs)
+            else:
+                span = Span(**kwargs)
+
+        if span.sampled is None and span.transaction is not None:
+            sample_rate = client and client.options["traces_sample_rate"] or 0
+            span.sampled = random.random() < sample_rate
+
+        if span.sampled:
+            max_spans = (
+                client and client.options["_experiments"].get("max_spans") or 1000
+            )
+            span.init_finished_spans(maxlen=max_spans)
+
+        return span
+
+    @overload  # noqa
+    def push_scope(
+        self, callback=None  # type: Optional[None]
+    ):
+        # type: (...) -> ContextManager[Scope]
+        pass
+
+    @overload  # noqa
+    def push_scope(
+        self, callback  # type: Callable[[Scope], None]
+    ):
+        # type: (...) -> None
+        pass
+
+    def push_scope(  # noqa
+        self, callback=None  # type: Optional[Callable[[Scope], None]]
+    ):
+        # type: (...) -> Optional[ContextManager[Scope]]
+        """
+        Pushes a new layer on the scope stack.
+
+        :param callback: If provided, this method pushes a scope, calls
+            `callback`, and pops the scope again.
+
+        :returns: If no `callback` is provided, a context manager that should
+            be used to pop the scope again.
+        """
+
+        if callback is not None:
+            with self.push_scope() as scope:
+                callback(scope)
+            return None
+
+        client, scope = self._stack[-1]
+        new_layer = (client, copy.copy(scope))
+        self._stack.append(new_layer)
+
+        return _ScopeManager(self)
+
+    scope = push_scope
+
+    def pop_scope_unsafe(self):
+        # type: () -> Tuple[Optional[Client], Scope]
+        """
+        Pops a scope layer from the stack.
+
+        Try to use the context manager :py:meth:`push_scope` instead.
+        """
+        rv = self._stack.pop()
+        assert self._stack, "stack must have at least one layer"
+        return rv
+
+    @overload  # noqa
+    def configure_scope(
+        self, callback=None  # type: Optional[None]
+    ):
+        # type: (...) -> ContextManager[Scope]
+        pass
+
+    @overload  # noqa
+    def configure_scope(
+        self, callback  # type: Callable[[Scope], None]
+    ):
+        # type: (...) -> None
+        pass
+
+    def configure_scope(  # noqa
+        self, callback=None  # type: Optional[Callable[[Scope], None]]
+    ):  # noqa
+        # type: (...) -> Optional[ContextManager[Scope]]
+
+        """
+        Reconfigures the scope.
+
+        :param callback: If provided, call the callback with the current scope.
+
+        :returns: If no callback is provided, returns a context manager that returns the scope.
+        """
+
+        client, scope = self._stack[-1]
+        if callback is not None:
+            if client is not None:
+                callback(scope)
+
+            return None
+
+        @contextmanager
+        def inner():
+            # type: () -> Generator[Scope, None, None]
+            if client is not None:
+                yield scope
+            else:
+                yield Scope()
+
+        return inner()
+
+    def flush(
+        self,
+        timeout=None,  # type: Optional[float]
+        callback=None,  # type: Optional[Callable[[int, float], None]]
+    ):
+        # type: (...) -> None
+        """
+        Alias for :py:meth:`sentry_sdk.Client.flush`
+        """
+        client, scope = self._stack[-1]
+        if client is not None:
+            return client.flush(timeout=timeout, callback=callback)
+
+    def iter_trace_propagation_headers(self):
+        # type: () -> Generator[Tuple[str, str], None, None]
+        # TODO: Document
+        client, scope = self._stack[-1]
+        if scope._span is None:
+            return
+
+        propagate_traces = client and client.options["propagate_traces"]
+        if not propagate_traces:
+            return
+
+        if client and client.options["traceparent_v2"]:
+            traceparent = scope._span.to_traceparent()
+        else:
+            traceparent = scope._span.to_legacy_traceparent()
+
+        yield "sentry-trace", traceparent
+
+
+GLOBAL_HUB = Hub()
+_local.set(GLOBAL_HUB)
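A short sketch of the hub API defined above; the tag and transaction names are arbitrary examples, and the DSN is again read from the environment if present:

```python
import sentry_sdk
from sentry_sdk import Hub

sentry_sdk.init()  # reads SENTRY_DSN from the environment if set

hub = Hub.current
hub.add_breadcrumb(category="demo", message="about to fail", level="info")

with hub.push_scope() as scope:
    scope.set_tag("section", "hub-demo")
    try:
        1 / 0
    except ZeroDivisionError:
        hub.capture_exception()

# Spans only show up when they belong to a transaction and
# `traces_sample_rate` is non-zero (it defaults to 0.0 in this version).
with hub.start_span(transaction="demo transaction"):
    pass
```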
diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py
new file mode 100644
index 0000000..9c5fa99
--- /dev/null
+++ b/sentry_sdk/integrations/__init__.py
@@ -0,0 +1,127 @@
+"""This package"""
+from __future__ import absolute_import
+
+from threading import Lock
+
+from sentry_sdk._compat import iteritems
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Iterator
+    from typing import Dict
+    from typing import List
+    from typing import Set
+    from typing import Type
+    from typing import Callable
+
+
+_installer_lock = Lock()
+_installed_integrations = set()  # type: Set[str]
+
+
+def _generate_default_integrations_iterator(*import_strings):
+    # type: (*str) -> Callable[[], Iterator[Type[Integration]]]
+    def iter_default_integrations():
+        # type: () -> Iterator[Type[Integration]]
+        """Returns an iterator of the default integration classes:
+        """
+        from importlib import import_module
+
+        for import_string in import_strings:
+            module, cls = import_string.rsplit(".", 1)
+            yield getattr(import_module(module), cls)
+
+    if isinstance(iter_default_integrations.__doc__, str):
+        for import_string in import_strings:
+            iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
+
+    return iter_default_integrations
+
+
+iter_default_integrations = _generate_default_integrations_iterator(
+    "sentry_sdk.integrations.logging.LoggingIntegration",
+    "sentry_sdk.integrations.stdlib.StdlibIntegration",
+    "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
+    "sentry_sdk.integrations.dedupe.DedupeIntegration",
+    "sentry_sdk.integrations.atexit.AtexitIntegration",
+    "sentry_sdk.integrations.modules.ModulesIntegration",
+    "sentry_sdk.integrations.argv.ArgvIntegration",
+    "sentry_sdk.integrations.threading.ThreadingIntegration",
+)
+
+del _generate_default_integrations_iterator
+
+
+def setup_integrations(integrations, with_defaults=True):
+    # type: (List[Integration], bool) -> Dict[str, Integration]
+    """Given a list of integration instances this installs them all.  When
+    `with_defaults` is set to `True` then all default integrations are added
+    unless they were already provided before.
+    """
+    integrations = dict(
+        (integration.identifier, integration) for integration in integrations or ()
+    )
+
+    logger.debug("Setting up integrations (with default = %s)", with_defaults)
+
+    if with_defaults:
+        for integration_cls in iter_default_integrations():
+            if integration_cls.identifier not in integrations:
+                instance = integration_cls()
+                integrations[instance.identifier] = instance
+
+    for identifier, integration in iteritems(integrations):  # type: ignore
+        with _installer_lock:
+            if identifier not in _installed_integrations:
+                logger.debug(
+                    "Setting up previously not enabled integration %s", identifier
+                )
+                try:
+                    type(integration).setup_once()
+                except NotImplementedError:
+                    if getattr(integration, "install", None) is not None:
+                        logger.warning(
+                            "Integration %s: The install method is "
+                            "deprecated. Use `setup_once`.",
+                            identifier,
+                        )
+                        integration.install()
+                    else:
+                        raise
+                _installed_integrations.add(identifier)
+
+    for identifier in integrations:
+        logger.debug("Enabling integration %s", identifier)
+
+    return integrations
+
+
+class Integration(object):
+    """Baseclass for all integrations.
+
+    To accept options for an integration, implement your own constructor that
+    saves those options on `self`.
+    """
+
+    install = None
+    """Legacy method, do not implement."""
+
+    identifier = None  # type: str
+    """String unique ID of integration type"""
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """
+        Initialize the integration.
+
+        This function is only called once, ever. Configuration is not available
+        at this point, so the only thing to do here is to hook into exception
+        handlers, and perhaps do monkeypatches.
+
+        Inside those hooks, `Hub.current.get_integration(...)` can be used to
+        access the integration instance again.
+        """
+        raise NotImplementedError()
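A hedged sketch of a custom integration built on the base class above; `MyIntegration` and its identifier are hypothetical names:

```python
import sentry_sdk
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor


class MyIntegration(Integration):
    """Hypothetical integration that tags every outgoing event."""

    identifier = "my_integration"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # Only act if the integration is enabled on the current client.
            if Hub.current.get_integration(MyIntegration) is not None:
                event.setdefault("tags", {})["my_integration"] = "yes"
            return event


sentry_sdk.init(integrations=[MyIntegration()])
```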
diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py
new file mode 100644
index 0000000..cb626a5
--- /dev/null
+++ b/sentry_sdk/integrations/_wsgi_common.py
@@ -0,0 +1,155 @@
+import json
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import AnnotatedValue
+from sentry_sdk._compat import text_type, iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Union
+
+
+SENSITIVE_ENV_KEYS = (
+    "REMOTE_ADDR",
+    "HTTP_X_FORWARDED_FOR",
+    "HTTP_SET_COOKIE",
+    "HTTP_COOKIE",
+    "HTTP_AUTHORIZATION",
+    "HTTP_X_FORWARDED_FOR",
+    "HTTP_X_REAL_IP",
+)
+
+SENSITIVE_HEADERS = tuple(
+    x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
+)
+
+
+class RequestExtractor(object):
+    def __init__(self, request):
+        # type: (Any) -> None
+        self.request = request
+
+    def extract_into_event(self, event):
+        # type: (Dict[str, Any]) -> None
+        client = Hub.current.client
+        if client is None:
+            return
+
+        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
+
+        content_length = self.content_length()
+        request_info = event.setdefault("request", {})
+
+        if _should_send_default_pii():
+            request_info["cookies"] = dict(self.cookies())
+
+        bodies = client.options["request_bodies"]
+        if (
+            bodies == "never"
+            or (bodies == "small" and content_length > 10 ** 3)
+            or (bodies == "medium" and content_length > 10 ** 4)
+        ):
+            data = AnnotatedValue(
+                "",
+                {"rem": [["!config", "x", 0, content_length]], "len": content_length},
+            )
+        else:
+            parsed_body = self.parsed_body()
+            if parsed_body is not None:
+                data = parsed_body
+            elif self.raw_data():
+                data = AnnotatedValue(
+                    "",
+                    {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
+                )
+            else:
+                return
+
+        request_info["data"] = data
+
+    def content_length(self):
+        # type: () -> int
+        try:
+            return int(self.env().get("CONTENT_LENGTH", 0))
+        except ValueError:
+            return 0
+
+    def cookies(self):
+        raise NotImplementedError()
+
+    def raw_data(self):
+        raise NotImplementedError()
+
+    def form(self):
+        raise NotImplementedError()
+
+    def parsed_body(self):
+        # type: () -> Optional[Dict[str, Any]]
+        form = self.form()
+        files = self.files()
+        if form or files:
+            data = dict(iteritems(form))
+            for k, v in iteritems(files):
+                size = self.size_of_file(v)
+                data[k] = AnnotatedValue(
+                    "", {"len": size, "rem": [["!raw", "x", 0, size]]}
+                )
+
+            return data
+
+        return self.json()
+
+    def is_json(self):
+        # type: () -> bool
+        return _is_json_content_type(self.env().get("CONTENT_TYPE"))
+
+    def json(self):
+        # type: () -> Optional[Any]
+        try:
+            if self.is_json():
+                raw_data = self.raw_data()
+                if not isinstance(raw_data, text_type):
+                    raw_data = raw_data.decode("utf-8")
+                return json.loads(raw_data)
+        except ValueError:
+            pass
+
+        return None
+
+    def files(self):
+        raise NotImplementedError()
+
+    def size_of_file(self, file):
+        raise NotImplementedError()
+
+    def env(self):
+        raise NotImplementedError()
+
+
+def _is_json_content_type(ct):
+    # type: (str) -> bool
+    mt = (ct or "").split(";", 1)[0]
+    return (
+        mt == "application/json"
+        or (mt.startswith("application/"))
+        and mt.endswith("+json")
+    )
+
+
+def _filter_headers(headers):
+    # type: (Dict[str, str]) -> Dict[str, str]
+    if _should_send_default_pii():
+        return headers
+
+    return {
+        k: (
+            v
+            if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
+            else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
+        )
+        for k, v in iteritems(headers)
+    }
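A tiny sketch of the header filtering above; the header values are made up:

```python
from sentry_sdk.integrations._wsgi_common import _filter_headers

headers = {"Authorization": "Bearer secret-token", "Accept": "application/json"}

# With no client bound (or send_default_pii left at False), the Authorization
# value is replaced by an AnnotatedValue placeholder instead of being kept.
print(_filter_headers(headers))
```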
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
new file mode 100644
index 0000000..aeef62e
--- /dev/null
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -0,0 +1,149 @@
+import sys
+import weakref
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    transaction_from_function,
+    HAS_REAL_CONTEXTVARS,
+)
+
+import asyncio
+from aiohttp.web import Application, HTTPException, UrlDispatcher  # type: ignore
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from aiohttp.web_request import Request  # type: ignore
+    from aiohttp.abc import AbstractMatchInfo  # type: ignore
+    from typing import Any
+    from typing import Dict
+    from typing import Tuple
+    from typing import Callable
+
+    from sentry_sdk.utils import ExcInfo
+    from sentry_sdk._types import EventProcessor
+
+
+class AioHttpIntegration(Integration):
+    identifier = "aiohttp"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        if not HAS_REAL_CONTEXTVARS:
+            # We better have contextvars or we're going to leak state between
+            # requests.
+            raise RuntimeError(
+                "The aiohttp integration for Sentry requires Python 3.7+ "
+                " or aiocontextvars package"
+            )
+
+        ignore_logger("aiohttp.server")
+
+        old_handle = Application._handle
+
+        async def sentry_app_handle(self, request, *args, **kwargs):
+            # type: (Any, Request, *Any, **Any) -> Any
+            async def inner():
+                # type: () -> Any
+                hub = Hub.current
+                if hub.get_integration(AioHttpIntegration) is None:
+                    return await old_handle(self, request, *args, **kwargs)
+
+                weak_request = weakref.ref(request)
+
+                with Hub(Hub.current) as hub:
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope.add_event_processor(_make_request_processor(weak_request))
+
+                    # If this transaction name makes it to the UI, AIOHTTP's
+                    # URL resolver did not find a route or died trying.
+                    with hub.start_span(transaction="generic AIOHTTP request"):
+                        try:
+                            response = await old_handle(self, request)
+                        except HTTPException:
+                            raise
+                        except Exception:
+                            reraise(*_capture_exception(hub))
+
+                        return response
+
+            # Explicitly wrap in task such that current contextvar context is
+            # copied. Just doing `return await inner()` will leak scope data
+            # between requests.
+            return await asyncio.get_event_loop().create_task(inner())
+
+        Application._handle = sentry_app_handle
+
+        old_urldispatcher_resolve = UrlDispatcher.resolve
+
+        async def sentry_urldispatcher_resolve(self, request):
+            # type: (UrlDispatcher, Request) -> AbstractMatchInfo
+            rv = await old_urldispatcher_resolve(self, request)
+
+            name = None
+
+            try:
+                name = transaction_from_function(rv.handler)
+            except Exception:
+                pass
+
+            if name is not None:
+                with Hub.current.configure_scope() as scope:
+                    scope.transaction = name
+
+            return rv
+
+        UrlDispatcher.resolve = sentry_urldispatcher_resolve
+
+
+def _make_request_processor(weak_request):
+    # type: (Callable[[], Request]) -> EventProcessor
+    def aiohttp_processor(
+        event,  # type: Dict[str, Any]
+        hint,  # type: Dict[str, Tuple[type, BaseException, Any]]
+    ):
+        # type: (...) -> Dict[str, Any]
+        request = weak_request()
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            # TODO: Figure out what to do with request body. Methods on request
+            # are async, but event processors are not.
+
+            request_info = event.setdefault("request", {})
+
+            request_info["url"] = "%s://%s%s" % (
+                request.scheme,
+                request.host,
+                request.path,
+            )
+
+            request_info["query_string"] = request.query_string
+            request_info["method"] = request.method
+            request_info["env"] = {"REMOTE_ADDR": request.remote}
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+        return event
+
+    return aiohttp_processor
+
+
+def _capture_exception(hub):
+    # type: (Hub) -> ExcInfo
+    exc_info = sys.exc_info()
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=hub.client.options,  # type: ignore
+        mechanism={"type": "aiohttp", "handled": False},
+    )
+    hub.capture_event(event, hint=hint)
+    return exc_info
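A minimal sketch of wiring the integration above into an aiohttp application; the `hello` handler is a made-up example, and Python 3.7+ (or aiocontextvars) is required as enforced in `setup_once()`:

```python
import sentry_sdk
from aiohttp import web
from sentry_sdk.integrations.aiohttp import AioHttpIntegration

sentry_sdk.init(integrations=[AioHttpIntegration()])


async def hello(request):
    1 / 0  # unhandled errors are captured by the patched Application._handle
    return web.Response(text="never reached")


app = web.Application()
app.router.add_get("/", hello)
# web.run_app(app)
```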
diff --git a/sentry_sdk/integrations/argv.py b/sentry_sdk/integrations/argv.py
new file mode 100644
index 0000000..f005521
--- /dev/null
+++ b/sentry_sdk/integrations/argv.py
@@ -0,0 +1,33 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+
+
+class ArgvIntegration(Integration):
+    identifier = "argv"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def processor(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if Hub.current.get_integration(ArgvIntegration) is not None:
+                extra = event.setdefault("extra", {})
+                # If some other event processor set `extra` to e.g. an `int`,
+                # don't crash here.
+                if isinstance(extra, dict):
+                    extra["sys.argv"] = sys.argv
+
+            return event
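A minimal sketch showing that this integration is active by default; the message is arbitrary:

```python
import sentry_sdk

# ArgvIntegration is part of the default integrations (see the iterator in
# integrations/__init__.py), so a plain init() enables it; every event then
# carries the process arguments under extra["sys.argv"].
sentry_sdk.init()
sentry_sdk.capture_message("argv demo")
```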
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
new file mode 100644
index 0000000..efbbe0a
--- /dev/null
+++ b/sentry_sdk/integrations/asgi.py
@@ -0,0 +1,154 @@
+"""
+An ASGI middleware.
+
+Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`_.
+"""
+
+import functools
+import urllib
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.utils import ContextVar, event_from_exception, transaction_from_function
+from sentry_sdk.tracing import Span
+
+if MYPY:
+    from typing import Dict
+    from typing import Any
+
+
+_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
+
+
+def _capture_exception(hub, exc):
+    # type: (Hub, Any) -> None
+
+    # Check client here as it might have been unset while streaming response
+    if hub.client is not None:
+        event, hint = event_from_exception(
+            exc,
+            client_options=hub.client.options,
+            mechanism={"type": "asgi", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+
+class SentryAsgiMiddleware:
+    __slots__ = ("app",)
+
+    def __init__(self, app):
+        self.app = app
+
+    def __call__(self, scope, receive=None, send=None):
+        if receive is None or send is None:
+
+            async def run_asgi2(receive, send):
+                return await self._run_app(
+                    scope, lambda: self.app(scope)(receive, send)
+                )
+
+            return run_asgi2
+        else:
+            return self._run_app(scope, lambda: self.app(scope, receive, send))
+
+    async def _run_app(self, scope, callback):
+        if _asgi_middleware_applied.get(False):
+            return await callback()
+
+        _asgi_middleware_applied.set(True)
+        try:
+            hub = Hub(Hub.current)
+            with hub:
+                with hub.configure_scope() as sentry_scope:
+                    sentry_scope.clear_breadcrumbs()
+                    sentry_scope._name = "asgi"
+                    processor = functools.partial(
+                        self.event_processor, asgi_scope=scope
+                    )
+                    sentry_scope.add_event_processor(processor)
+
+                if scope["type"] in ("http", "websocket"):
+                    span = Span.continue_from_headers(dict(scope["headers"]))
+                    span.op = "{}.server".format(scope["type"])
+                else:
+                    span = Span()
+                    span.op = "asgi.server"
+
+                span.set_tag("asgi.type", scope["type"])
+                span.transaction = "generic ASGI request"
+
+                with hub.start_span(span) as span:
+                    try:
+                        return await callback()
+                    except Exception as exc:
+                        _capture_exception(hub, exc)
+                        raise exc from None
+        finally:
+            _asgi_middleware_applied.set(False)
+
+    def event_processor(self, event, hint, asgi_scope):
+        request_info = event.setdefault("request", {})
+
+        if asgi_scope["type"] in ("http", "websocket"):
+            request_info["url"] = self.get_url(asgi_scope)
+            request_info["method"] = asgi_scope["method"]
+            request_info["headers"] = _filter_headers(self.get_headers(asgi_scope))
+            request_info["query_string"] = self.get_query(asgi_scope)
+
+        if asgi_scope.get("client") and _should_send_default_pii():
+            request_info["env"] = {"REMOTE_ADDR": asgi_scope["client"][0]}
+
+        if asgi_scope.get("endpoint"):
+            # Webframeworks like Starlette mutate the ASGI env once routing is
+            # done, which is sometime after the request has started. If we have
+            # an endpoint, overwrite our path-based transaction name.
+            event["transaction"] = self.get_transaction(asgi_scope)
+        return event
+
+    def get_url(self, scope):
+        """
+        Extract URL from the ASGI scope, without also including the querystring.
+        """
+        scheme = scope.get("scheme", "http")
+        server = scope.get("server", None)
+        path = scope.get("root_path", "") + scope["path"]
+
+        for key, value in scope["headers"]:
+            if key == b"host":
+                host_header = value.decode("latin-1")
+                return "%s://%s%s" % (scheme, host_header, path)
+
+        if server is not None:
+            host, port = server
+            default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
+            if port != default_port:
+                return "%s://%s:%s%s" % (scheme, host, port, path)
+            return "%s://%s%s" % (scheme, host, path)
+        return path
+
+    def get_query(self, scope):
+        """
+        Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
+        """
+        return urllib.parse.unquote(scope["query_string"].decode("latin-1"))
+
+    def get_headers(self, scope):
+        """
+        Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
+        """
+        headers = {}  # type: Dict[str, str]
+        for raw_key, raw_value in scope["headers"]:
+            key = raw_key.decode("latin-1")
+            value = raw_value.decode("latin-1")
+            if key in headers:
+                headers[key] = headers[key] + ", " + value
+            else:
+                headers[key] = value
+        return headers
+
+    def get_transaction(self, scope):
+        """
+        Return a transaction string to identify the routed endpoint.
+        """
+        return transaction_from_function(scope["endpoint"])
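A minimal sketch of wrapping a raw ASGI application with the middleware above; the `app` callable is a made-up example:

```python
import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

sentry_sdk.init()


async def app(scope, receive, send):
    # Minimal ASGI application; any exception raised here would be captured
    # by the middleware with the "asgi" mechanism.
    await send({"type": "http.response.start", "status": 200, "headers": []})
    await send({"type": "http.response.body", "body": b"ok"})


app = SentryAsgiMiddleware(app)
```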
diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py
new file mode 100644
index 0000000..ecaa82b
--- /dev/null
+++ b/sentry_sdk/integrations/atexit.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import atexit
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import logger
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+
+    from typing import Any
+    from typing import Optional
+
+
+def default_callback(pending, timeout):
+    """This is the default shutdown callback that is set on the options.
+    It prints out a message to stderr that informs the user that some events
+    are still pending and the process is waiting for them to flush out.
+    """
+
+    def echo(msg):
+        # type: (str) -> None
+        sys.stderr.write(msg + "\n")
+
+    echo("Sentry is attempting to send %i pending error messages" % pending)
+    echo("Waiting up to %s seconds" % timeout)
+    echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
+    sys.stderr.flush()
+
+
+class AtexitIntegration(Integration):
+    identifier = "atexit"
+
+    def __init__(self, callback=None):
+        # type: (Optional[Any]) -> None
+        if callback is None:
+            callback = default_callback
+        self.callback = callback
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @atexit.register
+        def _shutdown():
+            logger.debug("atexit: got shutdown signal")
+            hub = Hub.main
+            integration = hub.get_integration(AtexitIntegration)
+            if integration is not None:
+                logger.debug("atexit: shutting down client")
+
+                # If an integration is there, a client has to be there.
+                client = hub.client  # type: Any
+                client.close(callback=integration.callback)
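A small sketch of overriding the shutdown callback above; `quiet_callback` is a hypothetical replacement for `default_callback`:

```python
import sentry_sdk
from sentry_sdk.integrations.atexit import AtexitIntegration


def quiet_callback(pending, timeout):
    # Hypothetical replacement for default_callback: stay silent on shutdown.
    pass


sentry_sdk.init(integrations=[AtexitIntegration(callback=quiet_callback)])
```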
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
new file mode 100644
index 0000000..c96f9ab
--- /dev/null
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -0,0 +1,196 @@
+import sys
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+    logger,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+
+
+def _wrap_handler(handler):
+    def sentry_handler(event, context, *args, **kwargs):
+        hub = Hub.current
+        integration = hub.get_integration(AwsLambdaIntegration)
+        if integration is None:
+            return handler(event, context, *args, **kwargs)
+
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        with hub.push_scope() as scope:
+            with capture_internal_exceptions():
+                scope.clear_breadcrumbs()
+                scope.transaction = context.function_name
+                scope.add_event_processor(_make_request_event_processor(event, context))
+
+            try:
+                return handler(event, context, *args, **kwargs)
+            except Exception:
+                exc_info = sys.exc_info()
+                event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "aws_lambda", "handled": False},
+                )
+                hub.capture_event(event, hint=hint)
+                reraise(*exc_info)
+
+    return sentry_handler
+
+
+def _drain_queue():
+    with capture_internal_exceptions():
+        hub = Hub.current
+        integration = hub.get_integration(AwsLambdaIntegration)
+        if integration is not None:
+            # Flush out the event queue before AWS kills the
+            # process.
+            hub.flush()
+
+
+class AwsLambdaIntegration(Integration):
+    identifier = "aws_lambda"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        import __main__ as lambda_bootstrap  # type: ignore
+
+        pre_37 = True  # Python 3.6 or 2.7
+
+        if not hasattr(lambda_bootstrap, "handle_http_request"):
+            try:
+                import bootstrap as lambda_bootstrap  # type: ignore
+
+                pre_37 = False  # Python 3.7
+            except ImportError:
+                pass
+
+        if not hasattr(lambda_bootstrap, "handle_event_request"):
+            logger.warning(
+                "Not running in AWS Lambda environment, "
+                "AwsLambdaIntegration disabled"
+            )
+            return
+
+        if pre_37:
+            old_handle_event_request = lambda_bootstrap.handle_event_request
+
+            def sentry_handle_event_request(request_handler, *args, **kwargs):
+                request_handler = _wrap_handler(request_handler)
+                return old_handle_event_request(request_handler, *args, **kwargs)
+
+            lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+            old_handle_http_request = lambda_bootstrap.handle_http_request
+
+            def sentry_handle_http_request(request_handler, *args, **kwargs):
+                request_handler = _wrap_handler(request_handler)
+                return old_handle_http_request(request_handler, *args, **kwargs)
+
+            lambda_bootstrap.handle_http_request = sentry_handle_http_request
+
+            # Patch to_json to drain the queue. This should work even when the
+            # SDK is initialized inside of the handler
+
+            old_to_json = lambda_bootstrap.to_json
+
+            def sentry_to_json(*args, **kwargs):
+                _drain_queue()
+                return old_to_json(*args, **kwargs)
+
+            lambda_bootstrap.to_json = sentry_to_json
+        else:
+            old_handle_event_request = lambda_bootstrap.handle_event_request
+
+            def sentry_handle_event_request(  # type: ignore
+                lambda_runtime_client, request_handler, *args, **kwargs
+            ):
+                request_handler = _wrap_handler(request_handler)
+                return old_handle_event_request(
+                    lambda_runtime_client, request_handler, *args, **kwargs
+                )
+
+            lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+            # Patch the runtime client to drain the queue. This should work
+            # even when the SDK is initialized inside of the handler
+
+            def _wrap_post_function(f):
+                def inner(*args, **kwargs):
+                    _drain_queue()
+                    return f(*args, **kwargs)
+
+                return inner
+
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
+                lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
+            )
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
+                lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
+            )
+
+
+def _make_request_event_processor(aws_event, aws_context):
+    def event_processor(event, hint):
+        extra = event.setdefault("extra", {})
+        extra["lambda"] = {
+            "remaining_time_in_millis": aws_context.get_remaining_time_in_millis(),
+            "function_name": aws_context.function_name,
+            "function_version": aws_context.function_version,
+            "invoked_function_arn": aws_context.invoked_function_arn,
+            "aws_request_id": aws_context.aws_request_id,
+        }
+
+        request = event.setdefault("request", {})
+
+        if "httpMethod" in aws_event:
+            request["method"] = aws_event["httpMethod"]
+
+        request["url"] = _get_url(aws_event, aws_context)
+
+        if "queryStringParameters" in aws_event:
+            request["query_string"] = aws_event["queryStringParameters"]
+
+        if "headers" in aws_event:
+            request["headers"] = _filter_headers(aws_event["headers"])
+
+        if aws_event.get("body", None):
+            # Unfortunately we couldn't find a way to get the structured body
+            # from the AWS event, so every body is treated as unstructured.
+            request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+
+        if _should_send_default_pii():
+            user_info = event.setdefault("user", {})
+
+            id = aws_event.get("identity", {}).get("userArn")
+            if id is not None:
+                user_info["id"] = id
+
+            ip = aws_event.get("identity", {}).get("sourceIp")
+            if ip is not None:
+                user_info["ip_address"] = ip
+
+        return event
+
+    return event_processor
+
+
+def _get_url(event, context):
+    path = event.get("path", None)
+    headers = event.get("headers", {})
+    host = headers.get("Host", None)
+    proto = headers.get("X-Forwarded-Proto", None)
+    if proto and host and path:
+        return "{}://{}{}".format(proto, host, path)
+    return "awslambda:///{}".format(context.function_name)
diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py
new file mode 100644
index 0000000..3098f04
--- /dev/null
+++ b/sentry_sdk/integrations/beam.py
@@ -0,0 +1,156 @@
+from __future__ import absolute_import
+
+import sys
+import types
+from functools import wraps
+
+from sentry_sdk.hub import Hub
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+
+WRAPPED_FUNC = "_wrapped_{}_"
+INSPECT_FUNC = "_inspect_{}"  # Required format per apache_beam/transforms/core.py
+USED_FUNC = "_sentry_used_"
+
+
+class BeamIntegration(Integration):
+    identifier = "beam"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        from apache_beam.transforms.core import DoFn, ParDo  # type: ignore
+
+        ignore_logger("root")
+        ignore_logger("bundle_processor.create")
+
+        function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
+        for func_name in function_patches:
+            setattr(
+                DoFn,
+                INSPECT_FUNC.format(func_name),
+                _wrap_inspect_call(DoFn, func_name),
+            )
+
+        old_init = ParDo.__init__
+
+        def sentry_init_pardo(self, fn, *args, **kwargs):
+            # Do not monkey patch init twice
+            if not getattr(self, "_sentry_is_patched", False):
+                for func_name in function_patches:
+                    if not hasattr(fn, func_name):
+                        continue
+                    wrapped_func = WRAPPED_FUNC.format(func_name)
+
+                    # Only patch if neither the inspect function nor the
+                    # process function has already been wrapped, to avoid
+                    # monkey patching the same function twice. The hasattr
+                    # check above handles DoFns that lack some of these functions.
+                    process_func = getattr(fn, func_name)
+                    inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
+                    if not getattr(inspect_func, USED_FUNC, False) and not getattr(
+                        process_func, USED_FUNC, False
+                    ):
+                        setattr(fn, wrapped_func, process_func)
+                        setattr(fn, func_name, _wrap_task_call(process_func))
+
+                self._sentry_is_patched = True
+            old_init(self, fn, *args, **kwargs)
+
+        ParDo.__init__ = sentry_init_pardo
+
+
+def _wrap_inspect_call(cls, func_name):
+    from apache_beam.typehints.decorators import getfullargspec  # type: ignore
+
+    if not hasattr(cls, func_name):
+        return None
+
+    def _inspect(self):
+        """
+        Replacement inspect function that overrides how Beam retrieves the argspec of the (possibly wrapped) process function.
+        """
+        wrapped_func = WRAPPED_FUNC.format(func_name)
+        if hasattr(self, wrapped_func):
+            process_func = getattr(self, wrapped_func)
+        else:
+            process_func = getattr(self, func_name)
+            setattr(self, func_name, _wrap_task_call(process_func))
+            setattr(self, wrapped_func, process_func)
+
+        # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
+        # (which uses Signatures internally) should be used instead.
+        try:
+            from apache_beam.transforms.core import get_function_args_defaults
+
+            return get_function_args_defaults(process_func)
+        except ImportError:
+            return getfullargspec(process_func)
+
+    setattr(_inspect, USED_FUNC, True)
+    return _inspect
+
+
+def _wrap_task_call(func):
+    """
+    Wrap the task call in a try/except to capture exceptions.
+    The client is passed on to raise_exception so it can be rebound if necessary.
+    """
+    client = Hub.current.client
+
+    @wraps(func)
+    def _inner(*args, **kwargs):
+        try:
+            gen = func(*args, **kwargs)
+        except Exception:
+            raise_exception(client)
+
+        if not isinstance(gen, types.GeneratorType):
+            return gen
+        return _wrap_generator_call(gen, client)
+
+    setattr(_inner, USED_FUNC, True)
+    return _inner
+
+
+def _capture_exception(exc_info, hub):
+    """
+    Send Beam exception to Sentry.
+    """
+    integration = hub.get_integration(BeamIntegration)
+    if integration:
+        client = hub.client
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "beam", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+
+def raise_exception(client):
+    """
+    Capture the current exception and re-raise it. If no client is bound to the current hub, rebind the given client first.
+    """
+    hub = Hub.current
+    if hub.client is None:
+        hub.bind_client(client)
+    exc_info = sys.exc_info()
+    with capture_internal_exceptions():
+        _capture_exception(exc_info, hub)
+    reraise(*exc_info)
+
+
+def _wrap_generator_call(gen, client):
+    """
+    Wrap the generator to handle any failures.
+    """
+    while True:
+        try:
+            yield next(gen)
+        except StopIteration:
+            break
+        except Exception:
+            raise_exception(client)
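A minimal sketch of enabling the integration above; it only shows the `init()` call, the pipeline itself is omitted:

```python
import sentry_sdk
from sentry_sdk.integrations.beam import BeamIntegration

# Enable the integration before constructing the pipeline so that
# ParDo.__init__ is already patched when transforms are created.
sentry_sdk.init(integrations=[BeamIntegration()])
```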
diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py
new file mode 100644
index 0000000..b008a19
--- /dev/null
+++ b/sentry_sdk/integrations/bottle.py
@@ -0,0 +1,182 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    transaction_from_function,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from typing import Any
+    from typing import Dict
+    from typing import Callable
+    from typing import Optional
+    from bottle import FileUpload, FormsDict, LocalRequest  # type: ignore
+
+from bottle import (
+    Bottle,
+    Route,
+    request as bottle_request,
+    HTTPResponse,
+)  # type: ignore
+
+
+class BottleIntegration(Integration):
+    identifier = "bottle"
+
+    transaction_style = None
+
+    def __init__(self, transaction_style="endpoint"):
+        # type: (str) -> None
+        TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        # monkey patch method Bottle.__call__
+        old_app = Bottle.__call__
+
+        def sentry_patched_wsgi_app(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable) -> _ScopedResponse
+
+            hub = Hub.current
+            integration = hub.get_integration(BottleIntegration)
+            if integration is None:
+                return old_app(self, environ, start_response)
+
+            return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
+                environ, start_response
+            )
+
+        Bottle.__call__ = sentry_patched_wsgi_app  # type: ignore
+
+        # monkey patch method Bottle._handle
+        old_handle = Bottle._handle
+
+        def _patched_handle(self, environ):
+            hub = Hub.current
+            integration = hub.get_integration(BottleIntegration)
+            if integration is None:
+                return old_handle(self, environ)
+
+            # create new scope
+            scope_manager = hub.push_scope()
+
+            with scope_manager:
+                app = self
+                with hub.configure_scope() as scope:
+                    scope._name = "bottle"
+                    scope.add_event_processor(
+                        _make_request_event_processor(app, bottle_request, integration)
+                    )
+                res = old_handle(self, environ)
+
+            # scope cleanup
+            return res
+
+        Bottle._handle = _patched_handle
+
+        # monkey patch method Route._make_callback
+        old_make_callback = Route._make_callback
+
+        def patched_make_callback(self, *args, **kwargs):
+            hub = Hub.current
+            integration = hub.get_integration(BottleIntegration)
+            prepared_callback = old_make_callback(self, *args, **kwargs)
+            if integration is None:
+                return prepared_callback
+
+            # If an integration is there, a client has to be there.
+            client = hub.client  # type: Any
+
+            def wrapped_callback(*args, **kwargs):
+                def capture_exception(exception):
+                    event, hint = event_from_exception(
+                        exception,
+                        client_options=client.options,
+                        mechanism={"type": "bottle", "handled": False},
+                    )
+                    hub.capture_event(event, hint=hint)
+
+                try:
+                    res = prepared_callback(*args, **kwargs)
+                except HTTPResponse:
+                    raise
+                except Exception as exception:
+                    capture_exception(exception)
+                    raise exception
+
+                return res
+
+            return wrapped_callback
+
+        Route._make_callback = patched_make_callback
+
+
+class BottleRequestExtractor(RequestExtractor):
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.environ
+
+    def cookies(self):
+        # type: () -> Dict[str, str]
+        return self.request.cookies
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body.read()
+
+    def form(self):
+        # type: () -> FormsDict
+        if self.is_json():
+            return None
+        return self.request.forms.decode()
+
+    def files(self):
+        # type: () -> Optional[Dict[str, str]]
+        if self.is_json():
+            return None
+
+        return self.request.files
+
+    def size_of_file(self, file):
+        # type: (FileUpload) -> int
+        return file.content_length
+
+
+def _make_request_event_processor(app, request, integration):
+    # type: (Bottle, LocalRequest, BottleIntegration) -> Callable
+    def inner(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+        try:
+            if integration.transaction_style == "endpoint":
+                event["transaction"] = request.route.name or transaction_from_function(
+                    request.route.callback
+                )
+            elif integration.transaction_style == "url":
+                event["transaction"] = request.route.rule  # type: ignore
+        except Exception:
+            pass
+
+        with capture_internal_exceptions():
+            BottleRequestExtractor(request).extract_into_event(event)
+
+        return event
+
+    return inner
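# Editor's note: a minimal usage sketch (not part of the patch above). The DSN
# is a placeholder; transaction_style accepts "endpoint" (default) or "url",
# as validated in BottleIntegration.__init__ above.
import sentry_sdk
from sentry_sdk.integrations.bottle import BottleIntegration
from bottle import Bottle

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[BottleIntegration(transaction_style="endpoint")],
)

app = Bottle()

@app.route("/boom")
def boom():
    # Captured by wrapped_callback() above and re-raised so Bottle still
    # renders its normal error page.
    raise RuntimeError("example error")

# app.run(host="localhost", port=8080)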
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py
new file mode 100644
index 0000000..c95be9e
--- /dev/null
+++ b/sentry_sdk/integrations/celery.py
@@ -0,0 +1,209 @@
+from __future__ import absolute_import
+
+import functools
+import sys
+
+from celery.exceptions import (  # type: ignore
+    SoftTimeLimitExceeded,
+    Retry,
+    Ignore,
+    Reject,
+)
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import Span
+from sentry_sdk._compat import reraise
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+
+
+CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
+
+
+class CeleryIntegration(Integration):
+    identifier = "celery"
+
+    def __init__(self, propagate_traces=True):
+        # type: (bool) -> None
+        self.propagate_traces = propagate_traces
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        import celery.app.trace as trace  # type: ignore
+
+        old_build_tracer = trace.build_tracer
+
+        def sentry_build_tracer(name, task, *args, **kwargs):
+            if not getattr(task, "_sentry_is_patched", False):
+                # Need to patch both methods because older celery sometimes
+                # short-circuits to task.run if it thinks it's safe.
+                task.__call__ = _wrap_task_call(task, task.__call__)
+                task.run = _wrap_task_call(task, task.run)
+                task.apply_async = _wrap_apply_async(task, task.apply_async)
+
+                # `build_tracer` is apparently called for every task
+                # invocation. Can't wrap every celery task for every invocation
+                # or we will get infinitely nested wrapper functions.
+                task._sentry_is_patched = True
+
+            return _wrap_tracer(task, old_build_tracer(name, task, *args, **kwargs))
+
+        trace.build_tracer = sentry_build_tracer
+
+        _patch_worker_exit()
+
+        # This logger logs every status of every task that ran on the worker.
+        # Meaning that every task's breadcrumbs are full of stuff like "Task
+        # <foo> raised unexpected <bar>".
+        ignore_logger("celery.worker.job")
+        ignore_logger("celery.app.trace")
+
+
+def _wrap_apply_async(task, f):
+    @functools.wraps(f)
+    def apply_async(*args, **kwargs):
+        hub = Hub.current
+        integration = hub.get_integration(CeleryIntegration)
+        if integration is not None and integration.propagate_traces:
+            headers = None
+            for key, value in hub.iter_trace_propagation_headers():
+                if headers is None:
+                    headers = dict(kwargs.get("headers") or {})
+                headers[key] = value
+            if headers is not None:
+                kwargs["headers"] = headers
+
+            with hub.start_span(op="celery.submit", description=task.name):
+                return f(*args, **kwargs)
+        else:
+            return f(*args, **kwargs)
+
+    return apply_async
+
+
+def _wrap_tracer(task, f):
+    # Need to wrap tracer for pushing the scope before prerun is sent, and
+    # popping it after postrun is sent.
+    #
+    # This is the reason we don't use signals for hooking in the first place.
+    # Also because in Celery 3, signal dispatch returns early if one handler
+    # crashes.
+    @functools.wraps(f)
+    def _inner(*args, **kwargs):
+        hub = Hub.current
+        if hub.get_integration(CeleryIntegration) is None:
+            return f(*args, **kwargs)
+
+        with hub.push_scope() as scope:
+            scope._name = "celery"
+            scope.clear_breadcrumbs()
+            scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
+
+            span = Span.continue_from_headers(args[3].get("headers") or {})
+            span.op = "celery.task"
+            span.transaction = "unknown celery task"
+
+            with capture_internal_exceptions():
+                # Celery task objects are not a thing to be trusted. Even
+                # something such as attribute access can fail.
+                span.transaction = task.name
+
+            with hub.start_span(span):
+                return f(*args, **kwargs)
+
+    return _inner
+
+
+def _wrap_task_call(task, f):
+    # Need to wrap task call because the exception is caught before we get to
+    # see it. Also celery's reported stacktrace is untrustworthy.
+
+    # functools.wraps is important here because celery-once looks at this
+    # method's name.
+    # https://github.com/getsentry/sentry-python/issues/421
+    @functools.wraps(f)
+    def _inner(*args, **kwargs):
+        try:
+            return f(*args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            with capture_internal_exceptions():
+                _capture_exception(task, exc_info)
+            reraise(*exc_info)
+
+    return _inner
+
+
+def _make_event_processor(task, uuid, args, kwargs, request=None):
+    def event_processor(event, hint):
+        with capture_internal_exceptions():
+            extra = event.setdefault("extra", {})
+            extra["celery-job"] = {
+                "task_name": task.name,
+                "args": args,
+                "kwargs": kwargs,
+            }
+
+        if "exc_info" in hint:
+            with capture_internal_exceptions():
+                if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
+                    event["fingerprint"] = [
+                        "celery",
+                        "SoftTimeLimitExceeded",
+                        getattr(task, "name", task),
+                    ]
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(task, exc_info):
+    hub = Hub.current
+
+    if hub.get_integration(CeleryIntegration) is None:
+        return
+    if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
+        return
+    if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={"type": "celery", "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+    with capture_internal_exceptions():
+        with hub.configure_scope() as scope:
+            scope.span.set_failure()
+
+
+def _patch_worker_exit():
+    # Need to flush queue before worker shutdown because a crashing worker will
+    # call os._exit
+    from billiard.pool import Worker  # type: ignore
+
+    old_workloop = Worker.workloop
+
+    def sentry_workloop(*args, **kwargs):
+        try:
+            return old_workloop(*args, **kwargs)
+        finally:
+            with capture_internal_exceptions():
+                hub = Hub.current
+                if hub.get_integration(CeleryIntegration) is not None:
+                    hub.flush()
+
+    Worker.workloop = sentry_workloop
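# Editor's note: a minimal usage sketch (not part of the patch above). The DSN
# and broker URL are placeholders. propagate_traces=True (the default) makes
# _wrap_apply_async forward sentry-trace headers so worker spans join the
# caller's trace.
import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration
from celery import Celery

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[CeleryIntegration(propagate_traces=True)],
)

app = Celery("tasks", broker="redis://localhost:6379/0")

@app.task
def divide(a, b):
    # Exceptions escaping the task body are reported by _wrap_task_call unless
    # they are Retry/Ignore/Reject or listed in the task's `throws`.
    return a / b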
diff --git a/sentry_sdk/integrations/dedupe.py b/sentry_sdk/integrations/dedupe.py
new file mode 100644
index 0000000..b023df2
--- /dev/null
+++ b/sentry_sdk/integrations/dedupe.py
@@ -0,0 +1,43 @@
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import ContextVar
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+
+
+class DedupeIntegration(Integration):
+    identifier = "dedupe"
+
+    def __init__(self):
+        # type: () -> None
+        self._last_seen = ContextVar("last-seen")
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def processor(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if hint is None:
+                return event
+
+            integration = Hub.current.get_integration(DedupeIntegration)
+
+            if integration is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+            if exc_info is None:
+                return event
+
+            exc = exc_info[1]
+            if integration._last_seen.get(None) is exc:
+                return None
+            integration._last_seen.set(exc)
+            return event
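# Editor's note: a minimal sketch (not part of the patch above). The dedupe
# integration is enabled by default; capturing the same exception object twice
# produces only one event, because _last_seen still holds it.
import sentry_sdk

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

try:
    1 / 0
except ZeroDivisionError as exc:
    sentry_sdk.capture_exception(exc)
    sentry_sdk.capture_exception(exc)  # dropped by DedupeIntegration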
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
new file mode 100644
index 0000000..37ecad3
--- /dev/null
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -0,0 +1,448 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import sys
+import threading
+import weakref
+
+from django import VERSION as DJANGO_VERSION  # type: ignore
+from django.core import signals  # type: ignore
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import HAS_REAL_CONTEXTVARS
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from typing import Union
+
+    from django.core.handlers.wsgi import WSGIRequest  # type: ignore
+    from django.http.response import HttpResponse  # type: ignore
+    from django.http.request import QueryDict  # type: ignore
+    from django.utils.datastructures import MultiValueDict  # type: ignore
+
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from sentry_sdk._types import Event, Hint
+
+
+try:
+    from django.urls import resolve  # type: ignore
+except ImportError:
+    from django.core.urlresolvers import resolve  # type: ignore
+
+from sentry_sdk import Hub
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.serializer import add_global_repr_processor
+from sentry_sdk.tracing import record_sql_queries
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    transaction_from_function,
+    walk_exception_chain,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
+from sentry_sdk.integrations.django.templates import get_template_frame_from_exception
+from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+
+
+if DJANGO_VERSION < (1, 10):
+
+    def is_authenticated(request_user):
+        # type: (Any) -> bool
+        return request_user.is_authenticated()
+
+
+else:
+
+    def is_authenticated(request_user):
+        # type: (Any) -> bool
+        return request_user.is_authenticated
+
+
+class DjangoIntegration(Integration):
+    identifier = "django"
+
+    transaction_style = None
+    middleware_spans = None
+
+    def __init__(self, transaction_style="url", middleware_spans=True):
+        # type: (str, bool) -> None
+        TRANSACTION_STYLE_VALUES = ("function_name", "url")
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+        self.middleware_spans = middleware_spans
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        install_sql_hook()
+        # Patch in our custom middleware.
+
+        # logs an error for every 500
+        ignore_logger("django.server")
+        ignore_logger("django.request")
+
+        from django.core.handlers.wsgi import WSGIHandler
+
+        old_app = WSGIHandler.__call__
+
+        def sentry_patched_wsgi_handler(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable) -> _ScopedResponse
+            if Hub.current.get_integration(DjangoIntegration) is None:
+                return old_app(self, environ, start_response)
+
+            bound_old_app = old_app.__get__(self, WSGIHandler)
+
+            return SentryWsgiMiddleware(bound_old_app)(environ, start_response)
+
+        WSGIHandler.__call__ = sentry_patched_wsgi_handler
+
+        # patch get_response, because at that point we have the Django request
+        # object
+        from django.core.handlers.base import BaseHandler  # type: ignore
+
+        old_get_response = BaseHandler.get_response
+
+        def sentry_patched_get_response(self, request):
+            # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
+            hub = Hub.current
+            integration = hub.get_integration(DjangoIntegration)
+            if integration is not None:
+                _patch_drf()
+
+                with hub.configure_scope() as scope:
+                    # Rely on WSGI middleware to start a trace
+                    try:
+                        if integration.transaction_style == "function_name":
+                            scope.transaction = transaction_from_function(
+                                resolve(request.path).func
+                            )
+                        elif integration.transaction_style == "url":
+                            scope.transaction = LEGACY_RESOLVER.resolve(request.path)
+                    except Exception:
+                        pass
+
+                    scope.add_event_processor(
+                        _make_event_processor(weakref.ref(request), integration)
+                    )
+            return old_get_response(self, request)
+
+        BaseHandler.get_response = sentry_patched_get_response
+
+        signals.got_request_exception.connect(_got_request_exception)
+
+        @add_global_event_processor
+        def process_django_templates(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if hint is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+
+            if exc_info is None:
+                return event
+
+            exception = event.get("exception", None)
+
+            if exception is None:
+                return event
+
+            values = exception.get("values", None)
+
+            if values is None:
+                return event
+
+            for exception, (_, exc_value, _) in zip(
+                reversed(values), walk_exception_chain(exc_info)
+            ):
+                frame = get_template_frame_from_exception(exc_value)
+                if frame is not None:
+                    frames = exception.get("stacktrace", {}).get("frames", [])
+
+                    for i in reversed(range(len(frames))):
+                        f = frames[i]
+                        if (
+                            f.get("function") in ("parse", "render")
+                            and f.get("module") == "django.template.base"
+                        ):
+                            i += 1
+                            break
+                    else:
+                        i = len(frames)
+
+                    frames.insert(i, frame)
+
+            return event
+
+        @add_global_repr_processor
+        def _django_queryset_repr(value, hint):
+            try:
+                # Django 1.6 can fail to import `QuerySet` when Django settings
+                # have not yet been initialized.
+                #
+                # If we fail to import, return `NotImplemented`. It's at least
+                # unlikely that we have a query set in `value` when importing
+                # `QuerySet` fails.
+                from django.db.models.query import QuerySet  # type: ignore
+            except Exception:
+                return NotImplemented
+
+            if not isinstance(value, QuerySet) or value._result_cache:
+                return NotImplemented
+
+            # Do not call Hub.get_integration here. It is intentional that
+            # running under a new hub does not suddenly start executing
+            # querysets. This might be surprising to the user but it's likely
+            # less annoying.
+
+            return u"<%s from %s at 0x%x>" % (
+                value.__class__.__name__,
+                value.__module__,
+                id(value),
+            )
+
+        _patch_channels()
+        patch_django_middlewares()
+
+
+_DRF_PATCHED = False
+_DRF_PATCH_LOCK = threading.Lock()
+
+
+def _patch_drf():
+    """
+    Patch Django Rest Framework for more/better request data. DRF's request
+    type is a wrapper around Django's request type. The attribute we're
+    interested in is `request.data`, which is a cached property containing a
+    parsed request body. Reading a request body from that property is more
+    reliable than reading from any of Django's own properties, as those don't
+    hold payloads in memory and therefore can only be accessed once.
+
+    We patch the Django request object to include a weak backreference to the
+    DRF request object, such that we can later use either in
+    `DjangoRequestExtractor`.
+
+    This function is not called directly on SDK setup, because importing almost
+    any part of Django Rest Framework will try to access Django settings (where
+    `sentry_sdk.init()` might be called from in the first place). Instead we
+    run this function on every request and do the patching on the first
+    request.
+    """
+
+    global _DRF_PATCHED
+
+    if _DRF_PATCHED:
+        # Double-checked locking
+        return
+
+    with _DRF_PATCH_LOCK:
+        if _DRF_PATCHED:
+            return
+
+        # We set this regardless of whether the code below succeeds or fails.
+        # There is no point in trying to patch again on the next request.
+        _DRF_PATCHED = True
+
+        with capture_internal_exceptions():
+            try:
+                from rest_framework.views import APIView  # type: ignore
+            except ImportError:
+                pass
+            else:
+                old_drf_initial = APIView.initial
+
+                def sentry_patched_drf_initial(self, request, *args, **kwargs):
+                    with capture_internal_exceptions():
+                        request._request._sentry_drf_request_backref = weakref.ref(
+                            request
+                        )
+                    return old_drf_initial(self, request, *args, **kwargs)
+
+                APIView.initial = sentry_patched_drf_initial
+
+
+def _patch_channels():
+    try:
+        from channels.http import AsgiHandler  # type: ignore
+    except ImportError:
+        return
+
+    if not HAS_REAL_CONTEXTVARS:
+        # We better have contextvars or we're going to leak state between
+        # requests.
+        raise RuntimeError(
+            "We detected that you are using Django channels 2.0. To get proper "
+            "instrumentation for ASGI requests, the Sentry SDK requires "
+            "Python 3.7+ or the aiocontextvars package from PyPI."
+        )
+
+    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+    old_app = AsgiHandler.__call__
+
+    def sentry_patched_asgi_handler(self, receive, send):
+        if Hub.current.get_integration(DjangoIntegration) is None:
+            return old_app(receive, send)
+
+        middleware = SentryAsgiMiddleware(
+            lambda _scope: old_app.__get__(self, AsgiHandler)
+        )
+
+        return middleware(self.scope)(receive, send)
+
+    AsgiHandler.__call__ = sentry_patched_asgi_handler
+
+
+def _make_event_processor(weak_request, integration):
+    # type: (Callable[[], WSGIRequest], DjangoIntegration) -> Callable
+    def event_processor(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        request = weak_request()
+        if request is None:
+            return event
+
+        try:
+            drf_request = request._sentry_drf_request_backref()
+            if drf_request is not None:
+                request = drf_request
+        except AttributeError:
+            pass
+
+        with capture_internal_exceptions():
+            DjangoRequestExtractor(request).extract_into_event(event)
+
+        if _should_send_default_pii():
+            with capture_internal_exceptions():
+                _set_user_info(request, event)
+
+        return event
+
+    return event_processor
+
+
+def _got_request_exception(request=None, **kwargs):
+    # type: (WSGIRequest, **Any) -> None
+    hub = Hub.current
+    integration = hub.get_integration(DjangoIntegration)
+    if integration is not None:
+
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        event, hint = event_from_exception(
+            sys.exc_info(),
+            client_options=client.options,
+            mechanism={"type": "django", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+
+class DjangoRequestExtractor(RequestExtractor):
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.META
+
+    def cookies(self):
+        # type: () -> Dict[str, str]
+        return self.request.COOKIES
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> QueryDict
+        return self.request.POST
+
+    def files(self):
+        # type: () -> MultiValueDict
+        return self.request.FILES
+
+    def size_of_file(self, file):
+        return file.size
+
+    def parsed_body(self):
+        try:
+            return self.request.data
+        except AttributeError:
+            return RequestExtractor.parsed_body(self)
+
+
+def _set_user_info(request, event):
+    # type: (WSGIRequest, Dict[str, Any]) -> None
+    user_info = event.setdefault("user", {})
+
+    user = getattr(request, "user", None)
+
+    if user is None or not is_authenticated(user):
+        return
+
+    try:
+        user_info["id"] = str(user.pk)
+    except Exception:
+        pass
+
+    try:
+        user_info["email"] = user.email
+    except Exception:
+        pass
+
+    try:
+        user_info["username"] = user.get_username()
+    except Exception:
+        pass
+
+
+def install_sql_hook():
+    # type: () -> None
+    """If installed this causes Django's queries to be captured."""
+    try:
+        from django.db.backends.utils import CursorWrapper  # type: ignore
+    except ImportError:
+        from django.db.backends.util import CursorWrapper  # type: ignore
+
+    try:
+        real_execute = CursorWrapper.execute
+        real_executemany = CursorWrapper.executemany
+    except AttributeError:
+        # This won't work on Django versions < 1.6
+        return
+
+    def execute(self, sql, params=None):
+        hub = Hub.current
+        if hub.get_integration(DjangoIntegration) is None:
+            return real_execute(self, sql, params)
+
+        with record_sql_queries(
+            hub, self.cursor, sql, params, paramstyle="format", executemany=False
+        ):
+            return real_execute(self, sql, params)
+
+    def executemany(self, sql, param_list):
+        hub = Hub.current
+        if hub.get_integration(DjangoIntegration) is None:
+            return real_executemany(self, sql, param_list)
+
+        with record_sql_queries(
+            hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
+        ):
+            return real_executemany(self, sql, param_list)
+
+    CursorWrapper.execute = execute
+    CursorWrapper.executemany = executemany
+    ignore_logger("django.db.backends")
diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py
new file mode 100644
index 0000000..7cf6521
--- /dev/null
+++ b/sentry_sdk/integrations/django/middleware.py
@@ -0,0 +1,106 @@
+"""
+Create spans from Django middleware invocations
+"""
+
+from functools import wraps
+
+from django import VERSION as DJANGO_VERSION  # type: ignore
+
+from sentry_sdk import Hub
+from sentry_sdk.utils import ContextVar, transaction_from_function
+
+_import_string_should_wrap_middleware = ContextVar(
+    "import_string_should_wrap_middleware"
+)
+
+if DJANGO_VERSION < (1, 7):
+    import_string_name = "import_by_path"
+else:
+    import_string_name = "import_string"
+
+
+def patch_django_middlewares():
+    from django.core.handlers import base
+
+    old_import_string = getattr(base, import_string_name)
+
+    def sentry_patched_import_string(dotted_path):
+        rv = old_import_string(dotted_path)
+
+        if _import_string_should_wrap_middleware.get(None):
+            rv = _wrap_middleware(rv, dotted_path)
+
+        return rv
+
+    setattr(base, import_string_name, sentry_patched_import_string)
+
+    old_load_middleware = base.BaseHandler.load_middleware
+
+    def sentry_patched_load_middleware(self):
+        _import_string_should_wrap_middleware.set(True)
+        try:
+            return old_load_middleware(self)
+        finally:
+            _import_string_should_wrap_middleware.set(False)
+
+    base.BaseHandler.load_middleware = sentry_patched_load_middleware
+
+
+def _wrap_middleware(middleware, middleware_name):
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    def _get_wrapped_method(old_method):
+        @wraps(old_method)
+        def sentry_wrapped_method(*args, **kwargs):
+            hub = Hub.current
+            integration = hub.get_integration(DjangoIntegration)
+            if integration is None or not integration.middleware_spans:
+                return old_method(*args, **kwargs)
+
+            function_name = transaction_from_function(old_method)
+
+            description = middleware_name
+            function_basename = getattr(old_method, "__name__", None)
+            if function_basename:
+                description = "{}.{}".format(description, function_basename)
+
+            with hub.start_span(
+                op="django.middleware", description=description
+            ) as span:
+                span.set_tag("django.function_name", function_name)
+                span.set_tag("django.middleware_name", middleware_name)
+                return old_method(*args, **kwargs)
+
+        return sentry_wrapped_method
+
+    class SentryWrappingMiddleware(object):
+        def __init__(self, *args, **kwargs):
+            self._inner = middleware(*args, **kwargs)
+            self._call_method = None
+
+        # We need correct behavior for `hasattr()`, which we can only determine
+        # when we have an instance of the middleware we're wrapping.
+        def __getattr__(self, method_name):
+            if method_name not in (
+                "process_request",
+                "process_view",
+                "process_template_response",
+                "process_response",
+                "process_exception",
+            ):
+                raise AttributeError()
+
+            old_method = getattr(self._inner, method_name)
+            rv = _get_wrapped_method(old_method)
+            self.__dict__[method_name] = rv
+            return rv
+
+        def __call__(self, *args, **kwargs):
+            if self._call_method is None:
+                self._call_method = _get_wrapped_method(self._inner.__call__)
+            return self._call_method(*args, **kwargs)
+
+    if hasattr(middleware, "__name__"):
+        SentryWrappingMiddleware.__name__ = middleware.__name__
+
+    return SentryWrappingMiddleware
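# Editor's note: a minimal sketch (not part of the patch above). Middleware
# spans are on by default; if the extra "django.middleware" spans are not
# wanted, they can be disabled via the option checked in
# sentry_wrapped_method above. The DSN is a placeholder.
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[DjangoIntegration(middleware_spans=False)],
)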
diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py
new file mode 100644
index 0000000..2f99976
--- /dev/null
+++ b/sentry_sdk/integrations/django/templates.py
@@ -0,0 +1,117 @@
+from django.template import TemplateSyntaxError  # type: ignore
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+
+try:
+    # support Django 1.9
+    from django.template.base import Origin  # type: ignore
+except ImportError:
+    # backward compatibility
+    from django.template.loader import LoaderOrigin as Origin  # type: ignore
+
+
+def get_template_frame_from_exception(exc_value):
+    # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
+
+    # As of Django 1.9 or so the new template debug thing showed up.
+    if hasattr(exc_value, "template_debug"):
+        return _get_template_frame_from_debug(exc_value.template_debug)  # type: ignore
+
+    # As of r16833 (Django) all exceptions may contain a
+    # ``django_template_source`` attribute (rather than the legacy
+    # ``TemplateSyntaxError.source`` check)
+    if hasattr(exc_value, "django_template_source"):
+        return _get_template_frame_from_source(
+            exc_value.django_template_source  # type: ignore
+        )
+
+    if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
+        source = exc_value.source
+        if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
+            return _get_template_frame_from_source(source)
+
+    return None
+
+
+def _get_template_frame_from_debug(debug):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    if debug is None:
+        return None
+
+    lineno = debug["line"]
+    filename = debug["name"]
+    if filename is None:
+        filename = ""
+
+    pre_context = []
+    post_context = []
+    context_line = None
+
+    for i, line in debug["source_lines"]:
+        if i < lineno:
+            pre_context.append(line)
+        elif i > lineno:
+            post_context.append(line)
+        else:
+            context_line = line
+
+    return {
+        "filename": filename,
+        "lineno": lineno,
+        "pre_context": pre_context[-5:],
+        "post_context": post_context[:5],
+        "context_line": context_line,
+        "in_app": True,
+    }
+
+
+def _linebreak_iter(template_source):
+    yield 0
+    p = template_source.find("\n")
+    while p >= 0:
+        yield p + 1
+        p = template_source.find("\n", p + 1)
+
+
+def _get_template_frame_from_source(source):
+    if not source:
+        return None
+
+    origin, (start, end) = source
+    filename = getattr(origin, "loadname", None)
+    if filename is None:
+        filename = ""
+    template_source = origin.reload()
+    lineno = None
+    upto = 0
+    pre_context = []
+    post_context = []
+    context_line = None
+
+    for num, next in enumerate(_linebreak_iter(template_source)):
+        line = template_source[upto:next]
+        if start >= upto and end <= next:
+            lineno = num
+            context_line = line
+        elif lineno is None:
+            pre_context.append(line)
+        else:
+            post_context.append(line)
+
+        upto = next
+
+    if context_line is None or lineno is None:
+        return None
+
+    return {
+        "filename": filename,
+        "lineno": lineno,
+        "pre_context": pre_context[-5:],
+        "post_context": post_context[:5],
+        "context_line": context_line,
+    }
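# Editor's note: a minimal sketch (not part of the patch above) of the shape
# _get_template_frame_from_debug() expects from Django's `template_debug`
# attribute. The keys mirror the ones read in the helper; the values are made
# up for illustration, and Django must be importable for this module to load.
from sentry_sdk.integrations.django.templates import _get_template_frame_from_debug

debug = {
    "name": "polls/index.html",
    "line": 3,
    "source_lines": [
        (1, "{% extends 'base.html' %}"),
        (2, "{% block content %}"),
        (3, "{{ item|missing_filter }}"),
        (4, "{% endblock %}"),
    ],
}

frame = _get_template_frame_from_debug(debug)
# frame["context_line"] is line 3, with up to five lines of pre/post context.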
diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py
new file mode 100644
index 0000000..5e69532
--- /dev/null
+++ b/sentry_sdk/integrations/django/transactions.py
@@ -0,0 +1,134 @@
+"""
+Copied from raven-python. Used for the legacy URL resolver behind
+`DjangoIntegration(transaction_style="url")`.
+"""
+
+from __future__ import absolute_import
+
+import re
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from django.urls.resolvers import URLResolver  # type: ignore
+    from typing import Dict
+    from typing import List
+    from typing import Optional
+    from django.urls.resolvers import URLPattern  # type: ignore
+    from typing import Tuple
+    from typing import Union
+    from re import Pattern  # type: ignore
+
+try:
+    from django.urls import get_resolver  # type: ignore
+except ImportError:
+    from django.core.urlresolvers import get_resolver  # type: ignore
+
+
+def get_regex(resolver_or_pattern):
+    # type: (Union[URLPattern, URLResolver]) -> Pattern
+    """Utility method for django's deprecated resolver.regex"""
+    try:
+        regex = resolver_or_pattern.regex
+    except AttributeError:
+        regex = resolver_or_pattern.pattern.regex
+    return regex
+
+
+class RavenResolver(object):
+    _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
+    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)")
+    _non_named_group_matcher = re.compile(r"\([^\)]+\)")
+    # [foo|bar|baz]
+    _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
+    _camel_re = re.compile(r"([A-Z]+)([a-z])")
+
+    _cache = {}  # type: Dict[URLPattern, str]
+
+    def _simplify(self, pattern):
+        # type: (str) -> str
+        r"""
+        Clean up urlpattern regexes into something readable by humans:
+
+        From:
+        > "^(?P\w+)/athletes/(?P\w+)/$"
+
+        To:
+        > "{sport_slug}/athletes/{athlete_slug}/"
+        """
+        # remove optional params
+        # TODO(dcramer): it'd be nice to change these into [%s] but it currently
+        # conflicts with the other rules because we're doing regexp matches
+        # rather than parsing tokens
+        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)
+
+        # handle named groups first
+        result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
+
+        # handle non-named groups
+        result = self._non_named_group_matcher.sub("{var}", result)
+
+        # handle optional params
+        result = self._either_option_matcher.sub(lambda m: m.group(1), result)
+
+        # clean up any outstanding regex-y characters.
+        result = (
+            result.replace("^", "")
+            .replace("$", "")
+            .replace("?", "")
+            .replace("//", "/")
+            .replace("\\", "")
+        )
+
+        return result
+
+    def _resolve(self, resolver, path, parents=None):
+        # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
+
+        match = get_regex(resolver).search(path)  # Django < 2.0
+
+        if not match:
+            return None
+
+        if parents is None:
+            parents = [resolver]
+        elif resolver not in parents:
+            parents = parents + [resolver]
+
+        new_path = path[match.end() :]
+        for pattern in resolver.url_patterns:
+            # this is an include()
+            if not pattern.callback:
+                match = self._resolve(pattern, new_path, parents)
+                if match:
+                    return match
+                continue
+            elif not get_regex(pattern).search(new_path):
+                continue
+
+            try:
+                return self._cache[pattern]
+            except KeyError:
+                pass
+
+            prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
+            result = prefix + self._simplify(get_regex(pattern).pattern)
+            if not result.startswith("/"):
+                result = "/" + result
+            self._cache[pattern] = result
+            return result
+
+        return None
+
+    def resolve(
+        self,
+        path,  # type: str
+        urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
+    ):
+        # type: (...) -> str
+        resolver = get_resolver(urlconf)
+        match = self._resolve(resolver, path)
+        return match or path
+
+
+LEGACY_RESOLVER = RavenResolver()
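# Editor's note: a minimal sketch (not part of the patch above) of what the
# resolver's simplification produces for the urlpattern in the docstring.
# Django must be importable; no configured settings are needed for _simplify
# itself.
from sentry_sdk.integrations.django.transactions import RavenResolver

resolver = RavenResolver()
print(resolver._simplify(r"^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"))
# -> "{sport_slug}/athletes/{athlete_slug}/"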
diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py
new file mode 100644
index 0000000..7791de3
--- /dev/null
+++ b/sentry_sdk/integrations/excepthook.py
@@ -0,0 +1,68 @@
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Callable
+    from typing import Any
+
+
+class ExcepthookIntegration(Integration):
+    identifier = "excepthook"
+
+    always_run = False
+
+    def __init__(self, always_run=False):
+        # type: (bool) -> None
+
+        if not isinstance(always_run, bool):
+            raise ValueError(
+                "Invalid value for always_run: %s (must be type boolean)"
+                % (always_run,)
+            )
+        self.always_run = always_run
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        sys.excepthook = _make_excepthook(sys.excepthook)
+
+
+def _make_excepthook(old_excepthook):
+    # type: (Callable) -> Callable
+    def sentry_sdk_excepthook(exctype, value, traceback):
+        hub = Hub.current
+        integration = hub.get_integration(ExcepthookIntegration)
+
+        if integration is not None and _should_send(integration.always_run):
+            # If an integration is there, a client has to be there.
+            client = hub.client  # type: Any
+
+            with capture_internal_exceptions():
+                event, hint = event_from_exception(
+                    (exctype, value, traceback),
+                    client_options=client.options,
+                    mechanism={"type": "excepthook", "handled": False},
+                )
+                hub.capture_event(event, hint=hint)
+
+        return old_excepthook(exctype, value, traceback)
+
+    return sentry_sdk_excepthook
+
+
+def _should_send(always_run=False):
+    # type: (bool) -> bool
+    if always_run:
+        return True
+
+    if hasattr(sys, "ps1"):
+        # Disable the excepthook for interactive Python shells, otherwise
+        # every typo gets sent to Sentry.
+        return False
+
+    return True
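# Editor's note: a minimal usage sketch (not part of the patch above).
# always_run=True forces reporting even in interactive shells, where
# _should_send() would otherwise suppress it. The DSN is a placeholder.
import sentry_sdk
from sentry_sdk.integrations.excepthook import ExcepthookIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[ExcepthookIntegration(always_run=True)],
)

raise RuntimeError("uncaught, reported via the patched sys.excepthook")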
diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py
new file mode 100644
index 0000000..06dbb1d
--- /dev/null
+++ b/sentry_sdk/integrations/falcon.py
@@ -0,0 +1,176 @@
+from __future__ import absolute_import
+
+import falcon  # type: ignore
+import falcon.api_helpers  # type: ignore
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+
+
+class FalconRequestExtractor(RequestExtractor):
+    def env(self):
+        return self.request.env
+
+    def cookies(self):
+        return self.request.cookies
+
+    def form(self):
+        return None  # No such concept in Falcon
+
+    def files(self):
+        return None  # No such concept in Falcon
+
+    def raw_data(self):
+        # As request data can only be read once we won't make this available
+        # to Sentry. Just send back a dummy string in case there was a
+        # content length.
+        # TODO(jmagnusson): Figure out if there's a way to support this
+        content_length = self.content_length()
+        if content_length > 0:
+            return "[REQUEST_CONTAINING_RAW_DATA]"
+        else:
+            return None
+
+    def json(self):
+        try:
+            return self.request.media
+        except falcon.errors.HTTPBadRequest:
+            # NOTE(jmagnusson): We return `falcon.Request._media` here because
+            # falcon 1.4 doesn't do proper type checking in
+            # `falcon.Request.media`. This has been fixed in 2.0.
+            # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
+            return self.request._media
+
+
+class SentryFalconMiddleware(object):
+    """Captures exceptions in Falcon requests and send to Sentry"""
+
+    def process_request(self, req, resp, *args, **kwargs):
+        hub = Hub.current
+        integration = hub.get_integration(FalconIntegration)
+        if integration is None:
+            return
+
+        with hub.configure_scope() as scope:
+            scope._name = "falcon"
+            scope.add_event_processor(_make_request_event_processor(req, integration))
+
+
+class FalconIntegration(Integration):
+    identifier = "falcon"
+
+    transaction_style = None
+
+    def __init__(self, transaction_style="uri_template"):
+        # type: (str) -> None
+        TRANSACTION_STYLE_VALUES = ("uri_template", "path")
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _patch_wsgi_app()
+        _patch_handle_exception()
+        _patch_prepare_middleware()
+
+
+def _patch_wsgi_app():
+    original_wsgi_app = falcon.API.__call__
+
+    def sentry_patched_wsgi_app(self, env, start_response):
+        hub = Hub.current
+        integration = hub.get_integration(FalconIntegration)
+        if integration is None:
+            return original_wsgi_app(self, env, start_response)
+
+        sentry_wrapped = SentryWsgiMiddleware(
+            lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)
+        )
+
+        return sentry_wrapped(env, start_response)
+
+    falcon.API.__call__ = sentry_patched_wsgi_app
+
+
+def _patch_handle_exception():
+    original_handle_exception = falcon.API._handle_exception
+
+    def sentry_patched_handle_exception(self, *args):
+        # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
+        # method signature from `(ex, req, resp, params)` to
+        # `(req, resp, ex, params)`
+        if isinstance(args[0], Exception):
+            ex = args[0]
+        else:
+            ex = args[2]
+
+        was_handled = original_handle_exception(self, *args)
+
+        hub = Hub.current
+        integration = hub.get_integration(FalconIntegration)
+
+        if integration is not None and not _is_falcon_http_error(ex):
+            # If an integration is there, a client has to be there.
+            client = hub.client  # type: Any
+
+            event, hint = event_from_exception(
+                ex,
+                client_options=client.options,
+                mechanism={"type": "falcon", "handled": False},
+            )
+            hub.capture_event(event, hint=hint)
+
+        return was_handled
+
+    falcon.API._handle_exception = sentry_patched_handle_exception
+
+
+def _patch_prepare_middleware():
+    original_prepare_middleware = falcon.api_helpers.prepare_middleware
+
+    def sentry_patched_prepare_middleware(
+        middleware=None, independent_middleware=False
+    ):
+        hub = Hub.current
+        integration = hub.get_integration(FalconIntegration)
+        if integration is not None:
+            middleware = [SentryFalconMiddleware()] + (middleware or [])
+        return original_prepare_middleware(middleware, independent_middleware)
+
+    falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
+
+
+def _is_falcon_http_error(ex):
+    return isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
+
+
+def _make_request_event_processor(req, integration):
+    # type: (falcon.Request, FalconIntegration) -> Callable
+
+    def inner(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        if integration.transaction_style == "uri_template":
+            event["transaction"] = req.uri_template
+        elif integration.transaction_style == "path":
+            event["transaction"] = req.path
+
+        with capture_internal_exceptions():
+            FalconRequestExtractor(req).extract_into_event(event)
+
+        return event
+
+    return inner
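# Editor's note: a minimal usage sketch (not part of the patch above). The DSN
# is a placeholder; transaction_style accepts "uri_template" (default) or
# "path", as validated in FalconIntegration.__init__ above.
import falcon
import sentry_sdk
from sentry_sdk.integrations.falcon import FalconIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[FalconIntegration(transaction_style="uri_template")],
)

class ThingsResource(object):
    def on_get(self, req, resp):
        # Reported by sentry_patched_handle_exception above (HTTPError and
        # HTTPStatus are treated as control flow and ignored).
        raise RuntimeError("example error")

api = falcon.API()  # the patched __call__ wraps this app in SentryWsgiMiddleware
api.add_route("/things", ThingsResource())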
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
new file mode 100644
index 0000000..8f23f07
--- /dev/null
+++ b/sentry_sdk/integrations/flask.py
@@ -0,0 +1,237 @@
+from __future__ import absolute_import
+
+import weakref
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from typing import Any
+    from typing import Dict
+    from werkzeug.datastructures import ImmutableTypeConversionDict
+    from werkzeug.datastructures import ImmutableMultiDict
+    from werkzeug.datastructures import FileStorage
+    from typing import Union
+    from typing import Callable
+
+try:
+    import flask_login  # type: ignore
+except ImportError:
+    flask_login = None
+
+from flask import Request, Flask, _request_ctx_stack, _app_ctx_stack  # type: ignore
+from flask.signals import (
+    appcontext_pushed,
+    appcontext_tearing_down,
+    got_request_exception,
+    request_started,
+)
+
+
+class FlaskIntegration(Integration):
+    identifier = "flask"
+
+    transaction_style = None
+
+    def __init__(self, transaction_style="endpoint"):
+        # type: (str) -> None
+        TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        appcontext_pushed.connect(_push_appctx)
+        appcontext_tearing_down.connect(_pop_appctx)
+        request_started.connect(_request_started)
+        got_request_exception.connect(_capture_exception)
+
+        old_app = Flask.__call__
+
+        def sentry_patched_wsgi_app(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable) -> _ScopedResponse
+            if Hub.current.get_integration(FlaskIntegration) is None:
+                return old_app(self, environ, start_response)
+
+            return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
+                environ, start_response
+            )
+
+        Flask.__call__ = sentry_patched_wsgi_app  # type: ignore
+
+
+def _push_appctx(*args, **kwargs):
+    # type: (*Flask, **Any) -> None
+    hub = Hub.current
+    if hub.get_integration(FlaskIntegration) is not None:
+        # Always push a scope, regardless of whether the WSGI app might have
+        # pushed one already (not the case for the CLI, for example).
+        scope_manager = hub.push_scope()
+        scope_manager.__enter__()
+        _app_ctx_stack.top.sentry_sdk_scope_manager = scope_manager
+        with hub.configure_scope() as scope:
+            scope._name = "flask"
+
+
+def _pop_appctx(*args, **kwargs):
+    # type: (*Flask, **Any) -> None
+    scope_manager = getattr(_app_ctx_stack.top, "sentry_sdk_scope_manager", None)
+    if scope_manager is not None:
+        scope_manager.__exit__(None, None, None)
+
+
+def _request_started(sender, **kwargs):
+    # type: (Flask, **Any) -> None
+    hub = Hub.current
+    integration = hub.get_integration(FlaskIntegration)
+    if integration is None:
+        return
+
+    app = _app_ctx_stack.top.app
+    with hub.configure_scope() as scope:
+        request = _request_ctx_stack.top.request
+
+        # Rely on WSGI middleware to start a trace
+        try:
+            if integration.transaction_style == "endpoint":
+                scope.transaction = request.url_rule.endpoint  # type: ignore
+            elif integration.transaction_style == "url":
+                scope.transaction = request.url_rule.rule  # type: ignore
+        except Exception:
+            pass
+
+        weak_request = weakref.ref(request)
+        scope.add_event_processor(
+            _make_request_event_processor(  # type: ignore
+                app, weak_request, integration
+            )
+        )
+
+
+class FlaskRequestExtractor(RequestExtractor):
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.environ
+
+    def cookies(self):
+        # type: () -> ImmutableTypeConversionDict
+        return self.request.cookies
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.get_data()
+
+    def form(self):
+        # type: () -> ImmutableMultiDict
+        return self.request.form
+
+    def files(self):
+        # type: () -> ImmutableMultiDict
+        return self.request.files
+
+    def is_json(self):
+        return self.request.is_json
+
+    def json(self):
+        return self.request.get_json()
+
+    def size_of_file(self, file):
+        # type: (FileStorage) -> int
+        return file.content_length
+
+
+def _make_request_event_processor(app, weak_request, integration):
+    # type: (Flask, Callable[[], Request], FlaskIntegration) -> Callable
+    def inner(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        request = weak_request()
+
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            FlaskRequestExtractor(request).extract_into_event(event)
+
+        if _should_send_default_pii():
+            with capture_internal_exceptions():
+                _add_user_to_event(event)
+
+        return event
+
+    return inner
+
+
+def _capture_exception(sender, exception, **kwargs):
+    # type: (Flask, Union[ValueError, BaseException], **Any) -> None
+    hub = Hub.current
+    if hub.get_integration(FlaskIntegration) is None:
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    event, hint = event_from_exception(
+        exception,
+        client_options=client.options,
+        mechanism={"type": "flask", "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+
+def _add_user_to_event(event):
+    # type: (Dict[str, Any]) -> None
+    if flask_login is None:
+        return
+
+    user = flask_login.current_user
+    if user is None:
+        return
+
+    with capture_internal_exceptions():
+        # Access this object as late as possible as accessing the user
+        # is relatively costly
+
+        user_info = event.setdefault("user", {})
+
+        try:
+            user_info["id"] = user.get_id()
+            # TODO: more configurable user attrs here
+        except AttributeError:
+            # might happen if:
+            # - flask_login could not be imported
+            # - flask_login is not configured
+            # - no user is logged in
+            pass
+
+        # The following attribute accesses are ineffective for the general
+        # Flask-Login case, because the User interface of Flask-Login does not
+        # care about anything but the ID. However, Flask-User (based on
+        # Flask-Login) documents a few optional extra attributes.
+        #
+        # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names
+
+        try:
+            user_info["email"] = user_info["username"] = user.email
+        except Exception:
+            pass
+
+        try:
+            user_info["username"] = user.username
+        except Exception:
+            pass
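+
+
+# Usage sketch (illustrative; assumes a DSN is configured via `sentry_sdk.init`):
+# `transaction_style` controls whether events are named after the endpoint
+# function ("endpoint") or the URL rule ("url").
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.flask import FlaskIntegration
+#
+#     sentry_sdk.init(integrations=[FlaskIntegration(transaction_style="url")])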
diff --git a/sentry_sdk/integrations/gnu_backtrace.py b/sentry_sdk/integrations/gnu_backtrace.py
new file mode 100644
index 0000000..6671de9
--- /dev/null
+++ b/sentry_sdk/integrations/gnu_backtrace.py
@@ -0,0 +1,106 @@
+import re
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+
+
+MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
+TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
+HEXVAL_RE = r"[A-Fa-f0-9]+"
+
+
+FRAME_RE = r"""
+^(?P<index>\d+)\.\s
+(?P<package>{MODULE_RE})\(
+  (?P<retval>{TYPE_RE}\ )?
+  ((?P<function>{TYPE_RE})
+    (?P<args>\(.*\))?
+  )?
+  ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))?
+\)\s
+\[0x(?P<retaddr>{HEXVAL_RE})\]$
+""".format(
+    MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
+)
+
+FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)
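+
+# Illustrative line this pattern is meant to match (constructed from the regex
+# above rather than taken from a real backtrace):
+#
+#     7. /usr/bin/program(ns::func(int)+0x1a2) [0x7f3a12345678]
+#
+# which yields index=7, package=/usr/bin/program, function=ns::func,
+# args=(int), offset=1a2 and retaddr=7f3a12345678.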
+
+
+class GnuBacktraceIntegration(Integration):
+    identifier = "gnu_backtrace"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def process_gnu_backtrace(event, hint):
+            with capture_internal_exceptions():
+                return _process_gnu_backtrace(event, hint)
+
+
+def _process_gnu_backtrace(event, hint):
+    # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+    if Hub.current.get_integration(GnuBacktraceIntegration) is None:
+        return event
+
+    exc_info = hint.get("exc_info", None)
+
+    if exc_info is None:
+        return event
+
+    exception = event.get("exception", None)
+
+    if exception is None:
+        return event
+
+    values = exception.get("values", None)
+
+    if values is None:
+        return event
+
+    for exception in values:
+        frames = exception.get("stacktrace", {}).get("frames", [])
+        if not frames:
+            continue
+
+        msg = exception.get("value", None)
+        if not msg:
+            continue
+
+        additional_frames = []
+        new_msg = []
+
+        for line in msg.splitlines():
+            match = FRAME_RE.match(line)
+            if match:
+                additional_frames.append(
+                    (
+                        int(match.group("index")),
+                        {
+                            "package": match.group("package") or None,
+                            "function": match.group("function") or None,
+                            "platform": "native",
+                        },
+                    )
+                )
+            else:
+                # Put garbage lines back into message, not sure what to do with them.
+                new_msg.append(line)
+
+        if additional_frames:
+            additional_frames.sort(key=lambda x: -x[0])
+            for _, frame in additional_frames:
+                frames.append(frame)
+
+            new_msg.append("")
+            exception["value"] = "\n".join(new_msg)
+
+    return event
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
new file mode 100644
index 0000000..53564fd
--- /dev/null
+++ b/sentry_sdk/integrations/logging.py
@@ -0,0 +1,236 @@
+from __future__ import absolute_import
+
+import logging
+import datetime
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+    to_string,
+    event_from_exception,
+    current_stacktrace,
+    capture_internal_exceptions,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk._compat import iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from logging import LogRecord
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+
+DEFAULT_LEVEL = logging.INFO
+DEFAULT_EVENT_LEVEL = logging.ERROR
+
+_IGNORED_LOGGERS = set(["sentry_sdk.errors"])
+
+
+def ignore_logger(
+    name  # type: str
+):
+    # type: (...) -> None
+    """This disables recording (both in breadcrumbs and as events) calls to
+    a logger of a specific name.  Among other uses, many of our integrations
+    use this to prevent their actions being recorded as breadcrumbs. Exposed
+    to users as a way to quiet spammy loggers.
+
+    :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
+    """
+    _IGNORED_LOGGERS.add(name)
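+
+
+# Usage sketch (the logger name below is hypothetical):
+#
+#     from sentry_sdk.integrations.logging import ignore_logger
+#
+#     ignore_logger("chatty.third.party")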
+
+
+class LoggingIntegration(Integration):
+    identifier = "logging"
+
+    def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL):
+        # type: (Optional[int], Optional[int]) -> None
+        self._handler = None
+        self._breadcrumb_handler = None
+
+        if level is not None:
+            self._breadcrumb_handler = BreadcrumbHandler(level=level)
+
+        if event_level is not None:
+            self._handler = EventHandler(level=event_level)
+
+    def _handle_record(self, record):
+        # type: (LogRecord) -> None
+        if self._handler is not None and record.levelno >= self._handler.level:
+            self._handler.handle(record)
+
+        if (
+            self._breadcrumb_handler is not None
+            and record.levelno >= self._breadcrumb_handler.level
+        ):
+            self._breadcrumb_handler.handle(record)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        old_callhandlers = logging.Logger.callHandlers  # type: ignore
+
+        def sentry_patched_callhandlers(self, record):
+            # type: (Any, LogRecord) -> Any
+            try:
+                return old_callhandlers(self, record)
+            finally:
+                # This check is done twice: here, before we even look up the
+                # integration, and again in `_can_record`. Without the early
+                # check we risk a recursion error when the integration is
+                # resolved (and resolving it is also slower).
+                if record.name not in _IGNORED_LOGGERS:
+                    integration = Hub.current.get_integration(LoggingIntegration)
+                    if integration is not None:
+                        integration._handle_record(record)
+
+        logging.Logger.callHandlers = sentry_patched_callhandlers  # type: ignore
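+
+
+# Configuration sketch: keep INFO and above as breadcrumbs, but only turn
+# WARNING and above into events (assumes `sentry_sdk.init` is called once at
+# startup).
+#
+#     import logging
+#     import sentry_sdk
+#     from sentry_sdk.integrations.logging import LoggingIntegration
+#
+#     sentry_sdk.init(
+#         integrations=[
+#             LoggingIntegration(level=logging.INFO, event_level=logging.WARNING)
+#         ]
+#     )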
+
+
+def _can_record(record):
+    # type: (LogRecord) -> bool
+    return record.name not in _IGNORED_LOGGERS
+
+
+def _breadcrumb_from_record(record):
+    # type: (LogRecord) -> Dict[str, Any]
+    return {
+        "ty": "log",
+        "level": _logging_to_event_level(record.levelname),
+        "category": record.name,
+        "message": record.message,
+        "timestamp": datetime.datetime.utcfromtimestamp(record.created),
+        "data": _extra_from_record(record),
+    }
+
+
+def _logging_to_event_level(levelname):
+    # type: (str) -> str
+    return {"critical": "fatal"}.get(levelname.lower(), levelname.lower())
+
+
+COMMON_RECORD_ATTRS = frozenset(
+    (
+        "args",
+        "created",
+        "exc_info",
+        "exc_text",
+        "filename",
+        "funcName",
+        "levelname",
+        "levelno",
+        "lineno",
+        "message",
+        "module",
+        "msecs",
+        "msg",
+        "name",
+        "pathname",
+        "process",
+        "processName",
+        "relativeCreated",
+        "stack",
+        "tags",
+        "thread",
+        "threadName",
+    )
+)
+
+
+def _extra_from_record(record):
+    # type: (LogRecord) -> Dict[str, Any]
+    return {
+        k: v
+        for k, v in iteritems(vars(record))
+        if k not in COMMON_RECORD_ATTRS
+        and (not isinstance(k, str) or not k.startswith("_"))
+    }
+
+
+class EventHandler(logging.Handler, object):
+    """
+    A logging handler that emits Sentry events for each log record
+
+    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+    """
+
+    def emit(self, record):
+        # type: (LogRecord) -> Any
+        with capture_internal_exceptions():
+            self.format(record)
+            return self._emit(record)
+
+    def _emit(self, record):
+        # type: (LogRecord) -> None
+        if not _can_record(record):
+            return
+
+        hub = Hub.current
+        if hub.client is None:
+            return
+
+        client_options = hub.client.options
+
+        # exc_info might be None or (None, None, None)
+        if record.exc_info is not None and record.exc_info[0] is not None:
+            event, hint = event_from_exception(
+                record.exc_info,
+                client_options=client_options,
+                mechanism={"type": "logging", "handled": True},
+            )
+        elif record.exc_info and record.exc_info[0] is None:
+            event = {}
+            hint = {}
+            with capture_internal_exceptions():
+                event["threads"] = {
+                    "values": [
+                        {
+                            "stacktrace": current_stacktrace(
+                                client_options["with_locals"]
+                            ),
+                            "crashed": False,
+                            "current": True,
+                        }
+                    ]
+                }
+        else:
+            event = {}
+            hint = {}
+
+        hint["log_record"] = record
+
+        event["level"] = _logging_to_event_level(record.levelname)
+        event["logger"] = record.name
+        event["logentry"] = {"message": to_string(record.msg), "params": record.args}
+        event["extra"] = _extra_from_record(record)
+
+        hub.capture_event(event, hint=hint)
+
+
+# Legacy name
+SentryHandler = EventHandler
+
+
+class BreadcrumbHandler(logging.Handler, object):
+    """
+    A logging handler that records breadcrumbs for each log record.
+
+    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+    """
+
+    def emit(self, record):
+        # type: (LogRecord) -> Any
+        with capture_internal_exceptions():
+            self.format(record)
+            return self._emit(record)
+
+    def _emit(self, record):
+        # type: (LogRecord) -> None
+        if not _can_record(record):
+            return
+
+        Hub.current.add_breadcrumb(
+            _breadcrumb_from_record(record), hint={"log_record": record}
+        )
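+
+
+# Usage sketch for setups that attach the handlers manually instead of relying
+# on the `callHandlers` patch above:
+#
+#     import logging
+#
+#     logger = logging.getLogger(__name__)
+#     logger.addHandler(EventHandler(level=logging.ERROR))
+#     logger.addHandler(BreadcrumbHandler(level=logging.INFO))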
diff --git a/sentry_sdk/integrations/modules.py b/sentry_sdk/integrations/modules.py
new file mode 100644
index 0000000..aecffd0
--- /dev/null
+++ b/sentry_sdk/integrations/modules.py
@@ -0,0 +1,56 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Tuple
+    from typing import Iterator
+
+    from sentry_sdk._types import Event
+
+
+_installed_modules = None
+
+
+def _generate_installed_modules():
+    # type: () -> Iterator[Tuple[str, str]]
+    try:
+        import pkg_resources
+    except ImportError:
+        return
+
+    for info in pkg_resources.working_set:
+        yield info.key, info.version
+
+
+def _get_installed_modules():
+    # type: () -> Dict[str, str]
+    global _installed_modules
+    if _installed_modules is None:
+        _installed_modules = dict(_generate_installed_modules())
+    return _installed_modules
+
+
+class ModulesIntegration(Integration):
+    identifier = "modules"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def processor(event, hint):
+            # type: (Event, Any) -> Dict[str, Any]
+            if event.get("type") == "transaction":
+                return event
+
+            if Hub.current.get_integration(ModulesIntegration) is None:
+                return event
+
+            event["modules"] = dict(_get_installed_modules())
+            return event
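+
+
+# Sketch of the data this adds: `_get_installed_modules()` returns a mapping of
+# distribution names to versions, e.g. {"flask": "1.1.1", "requests": "2.22.0"}
+# (versions shown are only illustrative), which is attached as `event["modules"]`.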
diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py
new file mode 100644
index 0000000..4626db6
--- /dev/null
+++ b/sentry_sdk/integrations/pyramid.py
@@ -0,0 +1,211 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import weakref
+
+from pyramid.httpexceptions import HTTPException  # type: ignore
+from pyramid.request import Request  # type: ignore
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk._compat import reraise, iteritems
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from pyramid.response import Response  # type: ignore
+    from typing import Any
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from webob.cookies import RequestCookies  # type: ignore
+    from webob.compat import cgi_FieldStorage  # type: ignore
+
+    from sentry_sdk.utils import ExcInfo
+
+
+if getattr(Request, "authenticated_userid", None):
+
+    def authenticated_userid(request):
+        # type: (Request) -> Optional[Any]
+        return request.authenticated_userid
+
+
+else:
+    # bw-compat for pyramid < 1.5
+    from pyramid.security import authenticated_userid  # type: ignore
+
+
+class PyramidIntegration(Integration):
+    identifier = "pyramid"
+
+    transaction_style = None
+
+    def __init__(self, transaction_style="route_name"):
+        # type: (str) -> None
+        TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        from pyramid.router import Router  # type: ignore
+        from pyramid.request import Request  # type: ignore
+
+        old_handle_request = Router.handle_request
+
+        def sentry_patched_handle_request(self, request, *args, **kwargs):
+            # type: (Any, Request, *Any, **Any) -> Response
+            hub = Hub.current
+            integration = hub.get_integration(PyramidIntegration)
+            if integration is not None:
+                with hub.configure_scope() as scope:
+                    scope.add_event_processor(
+                        _make_event_processor(weakref.ref(request), integration)
+                    )
+
+            return old_handle_request(self, request, *args, **kwargs)
+
+        Router.handle_request = sentry_patched_handle_request
+
+        if hasattr(Request, "invoke_exception_view"):
+            old_invoke_exception_view = Request.invoke_exception_view
+
+            def sentry_patched_invoke_exception_view(self, *args, **kwargs):
+                rv = old_invoke_exception_view(self, *args, **kwargs)
+
+                if (
+                    self.exc_info
+                    and all(self.exc_info)
+                    and rv.status_int == 500
+                    and Hub.current.get_integration(PyramidIntegration) is not None
+                ):
+                    _capture_exception(self.exc_info)
+
+                return rv
+
+            Request.invoke_exception_view = sentry_patched_invoke_exception_view
+
+        old_wsgi_call = Router.__call__
+
+        def sentry_patched_wsgi_call(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable) -> _ScopedResponse
+            hub = Hub.current
+            integration = hub.get_integration(PyramidIntegration)
+            if integration is None:
+                return old_wsgi_call(self, environ, start_response)
+
+            def sentry_patched_inner_wsgi_call(environ, start_response):
+                try:
+                    return old_wsgi_call(self, environ, start_response)
+                except Exception:
+                    einfo = sys.exc_info()
+                    _capture_exception(einfo)
+                    reraise(*einfo)
+
+            return SentryWsgiMiddleware(sentry_patched_inner_wsgi_call)(
+                environ, start_response
+            )
+
+        Router.__call__ = sentry_patched_wsgi_call
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
+        return
+    hub = Hub.current
+    if hub.get_integration(PyramidIntegration) is None:
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={"type": "pyramid", "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+
+class PyramidRequestExtractor(RequestExtractor):
+    def url(self):
+        return self.request.path_url
+
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.environ
+
+    def cookies(self):
+        # type: () -> RequestCookies
+        return self.request.cookies
+
+    def raw_data(self):
+        # type: () -> str
+        return self.request.text
+
+    def form(self):
+        # type: () -> Dict[str, str]
+        return {
+            key: value
+            for key, value in iteritems(self.request.POST)
+            if not getattr(value, "filename", None)
+        }
+
+    def files(self):
+        # type: () -> Dict[str, cgi_FieldStorage]
+        return {
+            key: value
+            for key, value in iteritems(self.request.POST)
+            if getattr(value, "filename", None)
+        }
+
+    def size_of_file(self, postdata):
+        # type: (cgi_FieldStorage) -> int
+        file = postdata.file
+        try:
+            return os.fstat(file.fileno()).st_size
+        except Exception:
+            return 0
+
+
+def _make_event_processor(weak_request, integration):
+    # type: (Callable[[], Request], PyramidIntegration) -> Callable
+    def event_processor(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        request = weak_request()
+        if request is None:
+            return event
+
+        try:
+            if integration.transaction_style == "route_name":
+                event["transaction"] = request.matched_route.name
+            elif integration.transaction_style == "route_pattern":
+                event["transaction"] = request.matched_route.pattern
+        except Exception:
+            pass
+
+        with capture_internal_exceptions():
+            PyramidRequestExtractor(request).extract_into_event(event)
+
+        if _should_send_default_pii():
+            with capture_internal_exceptions():
+                user_info = event.setdefault("user", {})
+                user_info["id"] = authenticated_userid(request)
+
+        return event
+
+    return event_processor
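+
+
+# Usage sketch (illustrative): `transaction_style` picks between the matched
+# route's name and its pattern for the event's transaction.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.pyramid import PyramidIntegration
+#
+#     sentry_sdk.init(
+#         integrations=[PyramidIntegration(transaction_style="route_pattern")]
+#     )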
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
new file mode 100644
index 0000000..ef796bf
--- /dev/null
+++ b/sentry_sdk/integrations/redis.py
@@ -0,0 +1,46 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.integrations import Integration
+
+
+class RedisIntegration(Integration):
+    identifier = "redis"
+
+    @staticmethod
+    def setup_once():
+        import redis
+
+        old_execute_command = redis.StrictRedis.execute_command
+
+        def sentry_patched_execute_command(self, name, *args, **kwargs):
+            hub = Hub.current
+
+            if hub.get_integration(RedisIntegration) is None:
+                return old_execute_command(self, name, *args, **kwargs)
+
+            description = name
+
+            with capture_internal_exceptions():
+                description_parts = [name]
+                for i, arg in enumerate(args):
+                    if i > 10:
+                        break
+
+                    description_parts.append(repr(arg))
+
+                description = " ".join(description_parts)
+
+            with hub.start_span(op="redis", description=description) as span:
+                if name:
+                    span.set_tag("redis.command", name)
+
+                if name and args and name.lower() in ("get", "set", "setex", "setnx"):
+                    span.set_tag("redis.key", args[0])
+
+                return old_execute_command(self, name, *args, **kwargs)
+
+        redis.StrictRedis.execute_command = (  # type: ignore
+            sentry_patched_execute_command
+        )
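+
+
+# Usage sketch (illustrative): once enabled, every `execute_command` call is
+# wrapped in a span whose description is the command name followed by a repr of
+# its leading arguments, e.g. roughly "SET 'key' 'value'" for a SET call.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.redis import RedisIntegration
+#
+#     sentry_sdk.init(integrations=[RedisIntegration()])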
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
new file mode 100644
index 0000000..a32ec57
--- /dev/null
+++ b/sentry_sdk/integrations/rq.py
@@ -0,0 +1,134 @@
+from __future__ import absolute_import
+
+import weakref
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+from rq.timeouts import JobTimeoutException  # type: ignore
+from rq.worker import Worker  # type: ignore
+from rq.queue import Queue  # type: ignore
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Callable
+
+    from rq.job import Job  # type: ignore
+
+    from sentry_sdk.utils import ExcInfo
+
+
+class RqIntegration(Integration):
+    identifier = "rq"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        old_perform_job = Worker.perform_job
+
+        def sentry_patched_perform_job(self, job, *args, **kwargs):
+            # type: (Any, Job, *Queue, **Any) -> bool
+            hub = Hub.current
+            integration = hub.get_integration(RqIntegration)
+
+            if integration is None:
+                return old_perform_job(self, job, *args, **kwargs)
+
+            client = hub.client
+            assert client is not None
+
+            with hub.push_scope() as scope:
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(_make_event_processor(weakref.ref(job)))
+
+                span = Span.continue_from_headers(
+                    job.meta.get("_sentry_trace_headers") or {}
+                )
+                span.op = "rq.task"
+
+                with capture_internal_exceptions():
+                    span.transaction = job.func_name
+
+                with hub.start_span(span):
+                    rv = old_perform_job(self, job, *args, **kwargs)
+
+            if self.is_horse:
+                # We're inside of a forked process and RQ is
+                # about to call `os._exit`. Make sure that our
+                # events get sent out.
+                client.flush()
+
+            return rv
+
+        Worker.perform_job = sentry_patched_perform_job
+
+        old_handle_exception = Worker.handle_exception
+
+        def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
+            _capture_exception(exc_info)  # type: ignore
+            return old_handle_exception(self, job, *exc_info, **kwargs)
+
+        Worker.handle_exception = sentry_patched_handle_exception
+
+        old_enqueue_job = Queue.enqueue_job
+
+        def sentry_patched_enqueue_job(self, job, **kwargs):
+            hub = Hub.current
+            if hub.get_integration(RqIntegration) is not None:
+                job.meta["_sentry_trace_headers"] = dict(
+                    hub.iter_trace_propagation_headers()
+                )
+
+            return old_enqueue_job(self, job, **kwargs)
+
+        Queue.enqueue_job = sentry_patched_enqueue_job
+
+
+def _make_event_processor(weak_job):
+    # type: (Callable[[], Job]) -> Callable
+    def event_processor(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        job = weak_job()
+        if job is not None:
+            with capture_internal_exceptions():
+                extra = event.setdefault("extra", {})
+                extra["rq-job"] = {
+                    "job_id": job.id,
+                    "func": job.func_name,
+                    "args": job.args,
+                    "kwargs": job.kwargs,
+                    "description": job.description,
+                }
+
+        if "exc_info" in hint:
+            with capture_internal_exceptions():
+                if issubclass(hint["exc_info"][0], JobTimeoutException):
+                    event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(exc_info, **kwargs):
+    # type: (ExcInfo, **Any) -> None
+    hub = Hub.current
+    if hub.get_integration(RqIntegration) is None:
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={"type": "rq", "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
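+
+
+# Usage sketch (illustrative): enable the integration in both the process that
+# enqueues jobs and the worker; `_sentry_trace_headers` stored in `job.meta`
+# then links the worker's "rq.task" span to the enqueuing trace.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.rq import RqIntegration
+#
+#     sentry_sdk.init(integrations=[RqIntegration()])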
diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py
new file mode 100644
index 0000000..62e8cd2
--- /dev/null
+++ b/sentry_sdk/integrations/sanic.py
@@ -0,0 +1,218 @@
+import sys
+import weakref
+from inspect import isawaitable
+
+from sentry_sdk._compat import urlparse, reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    HAS_REAL_CONTEXTVARS,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
+from sentry_sdk.integrations.logging import ignore_logger
+
+from sanic import Sanic, __version__ as VERSION  # type: ignore
+from sanic.exceptions import SanicException  # type: ignore
+from sanic.router import Router  # type: ignore
+from sanic.handlers import ErrorHandler  # type: ignore
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import Optional
+    from typing import Union
+    from typing import Tuple
+
+    from sanic.request import Request, RequestParameters  # type: ignore
+
+    from sentry_sdk._types import Event, EventProcessor, Hint
+
+
+class SanicIntegration(Integration):
+    identifier = "sanic"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        if not HAS_REAL_CONTEXTVARS:
+            # We better have contextvars or we're going to leak state between
+            # requests.
+            raise RuntimeError(
+                "The sanic integration for Sentry requires Python 3.7+ "
+                "or the aiocontextvars package"
+            )
+
+        if VERSION.startswith("0.8."):
+            # Sanic 0.8 and older creates a logger named "root" and puts a
+            # stringified version of every exception in there (without exc_info),
+            # which our error deduplication can't detect.
+            #
+            # We explicitly check the version here because it is a very
+            # invasive step to ignore this logger and not necessary in newer
+            # versions at all.
+            #
+            # https://github.com/huge-success/sanic/issues/1332
+            ignore_logger("root")
+
+        old_handle_request = Sanic.handle_request
+
+        async def sentry_handle_request(self, request, *args, **kwargs):
+            # type: (Any, Request, *Any, **Any) -> Any
+            hub = Hub.current
+            if hub.get_integration(SanicIntegration) is None:
+                return old_handle_request(self, request, *args, **kwargs)
+
+            weak_request = weakref.ref(request)
+
+            with Hub(hub) as hub:
+                with hub.configure_scope() as scope:
+                    scope.clear_breadcrumbs()
+                    scope.add_event_processor(_make_request_processor(weak_request))
+
+                response = old_handle_request(self, request, *args, **kwargs)
+                if isawaitable(response):
+                    response = await response
+
+                return response
+
+        Sanic.handle_request = sentry_handle_request
+
+        old_router_get = Router.get
+
+        def sentry_router_get(self, request):
+            # type: (Any, Request) -> Any
+            rv = old_router_get(self, request)
+            hub = Hub.current
+            if hub.get_integration(SanicIntegration) is not None:
+                with capture_internal_exceptions():
+                    with hub.configure_scope() as scope:
+                        scope.transaction = rv[0].__name__
+            return rv
+
+        Router.get = sentry_router_get
+
+        old_error_handler_lookup = ErrorHandler.lookup
+
+        def sentry_error_handler_lookup(self, exception):
+            # type: (Any, Exception) -> Optional[Callable]
+            _capture_exception(exception)
+            old_error_handler = old_error_handler_lookup(self, exception)
+
+            if old_error_handler is None:
+                return None
+
+            if Hub.current.get_integration(SanicIntegration) is None:
+                return old_error_handler
+
+            async def sentry_wrapped_error_handler(request, exception):
+                # type: (Request, Exception) -> Any
+                try:
+                    response = old_error_handler(request, exception)
+                    if isawaitable(response):
+                        response = await response
+                    return response
+                except Exception:
+                    # Report errors that occur in Sanic error handler. These
+                    # exceptions will not even show up in Sanic's
+                    # `sanic.exceptions` logger.
+                    exc_info = sys.exc_info()
+                    _capture_exception(exc_info)
+                    reraise(*exc_info)
+
+            return sentry_wrapped_error_handler
+
+        ErrorHandler.lookup = sentry_error_handler_lookup
+
+
+def _capture_exception(exception):
+    # type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None
+    hub = Hub.current
+    integration = hub.get_integration(SanicIntegration)
+    if integration is None:
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    with capture_internal_exceptions():
+        event, hint = event_from_exception(
+            exception,
+            client_options=client.options,
+            mechanism={"type": "sanic", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+
+def _make_request_processor(weak_request):
+    # type: (Callable[[], Request]) -> EventProcessor
+    def sanic_processor(event, hint):
+        # type: (Event, Optional[Hint]) -> Optional[Event]
+
+        try:
+            if hint and issubclass(hint["exc_info"][0], SanicException):
+                return None
+        except KeyError:
+            pass
+
+        request = weak_request()
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            extractor = SanicRequestExtractor(request)
+            extractor.extract_into_event(event)
+
+            request_info = event["request"]
+            urlparts = urlparse.urlsplit(request.url)
+
+            request_info["url"] = "%s://%s%s" % (
+                urlparts.scheme,
+                urlparts.netloc,
+                urlparts.path,
+            )
+
+            request_info["query_string"] = urlparts.query
+            request_info["method"] = request.method
+            request_info["env"] = {"REMOTE_ADDR": request.remote_addr}
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+        return event
+
+    return sanic_processor
+
+
+class SanicRequestExtractor(RequestExtractor):
+    def content_length(self):
+        # type: () -> int
+        if self.request.body is None:
+            return 0
+        return len(self.request.body)
+
+    def cookies(self):
+        return dict(self.request.cookies)
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> RequestParameters
+        return self.request.form
+
+    def is_json(self):
+        raise NotImplementedError()
+
+    def json(self):
+        # type: () -> Optional[Any]
+        return self.request.json
+
+    def files(self):
+        # type: () -> RequestParameters
+        return self.request.files
+
+    def size_of_file(self, file):
+        return len(file.body or ())
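+
+
+# Usage sketch (illustrative): set up the SDK before the Sanic app starts
+# serving; on Python < 3.7 the aiocontextvars package is required (see the
+# contextvars check in `setup_once`).
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.sanic import SanicIntegration
+#
+#     sentry_sdk.init(integrations=[SanicIntegration()])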
diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py
new file mode 100644
index 0000000..0e20d73
--- /dev/null
+++ b/sentry_sdk/integrations/serverless.py
@@ -0,0 +1,50 @@
+import functools
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import event_from_exception
+from sentry_sdk._compat import reraise
+
+
+def serverless_function(f=None, flush=True):
+    def wrapper(f):
+        @functools.wraps(f)
+        def inner(*args, **kwargs):
+            with Hub(Hub.current) as hub:
+                with hub.configure_scope() as scope:
+                    scope.clear_breadcrumbs()
+
+                try:
+                    return f(*args, **kwargs)
+                except Exception:
+                    _capture_and_reraise()
+                finally:
+                    if flush:
+                        _flush_client()
+
+        return inner
+
+    if f is None:
+        return wrapper
+    else:
+        return wrapper(f)
+
+
+def _capture_and_reraise():
+    exc_info = sys.exc_info()
+    hub = Hub.current
+    if hub is not None and hub.client is not None:
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=hub.client.options,
+            mechanism={"type": "serverless", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+    reraise(*exc_info)
+
+
+def _flush_client():
+    hub = Hub.current
+    if hub is not None:
+        hub.flush()
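+
+
+# Usage sketch (the handler below is hypothetical): the decorator captures and
+# re-raises unhandled exceptions and, unless `flush=False` is passed, flushes
+# pending events before the function returns control to the runtime.
+#
+#     from sentry_sdk.integrations.serverless import serverless_function
+#
+#     @serverless_function
+#     def my_handler(event, context):
+#         ...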
diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py
new file mode 100644
index 0000000..882498a
--- /dev/null
+++ b/sentry_sdk/integrations/sqlalchemy.py
@@ -0,0 +1,69 @@
+from __future__ import absolute_import
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.tracing import record_sql_queries
+
+from sqlalchemy.engine import Engine  # type: ignore
+from sqlalchemy.event import listen  # type: ignore
+
+if MYPY:
+    from typing import Any
+    from typing import ContextManager
+    from typing import Optional
+
+    from sentry_sdk.tracing import Span
+
+
+class SqlalchemyIntegration(Integration):
+    identifier = "sqlalchemy"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        listen(Engine, "before_cursor_execute", _before_cursor_execute)
+        listen(Engine, "after_cursor_execute", _after_cursor_execute)
+        listen(Engine, "dbapi_error", _dbapi_error)
+
+
+def _before_cursor_execute(
+    conn, cursor, statement, parameters, context, executemany, *args
+):
+    # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
+    hub = Hub.current
+    if hub.get_integration(SqlalchemyIntegration) is None:
+        return
+
+    ctx_mgr = record_sql_queries(
+        hub,
+        cursor,
+        statement,
+        parameters,
+        paramstyle=context and context.dialect and context.dialect.paramstyle or None,
+        executemany=executemany,
+    )
+    conn._sentry_sql_span_manager = ctx_mgr
+
+    span = ctx_mgr.__enter__()
+
+    if span is not None:
+        conn._sentry_sql_span = span
+
+
+def _after_cursor_execute(conn, cursor, statement, *args):
+    # type: (Any, Any, Any, *Any) -> None
+    ctx_mgr = getattr(conn, "_sentry_sql_span_manager", None)  # type: ContextManager
+
+    if ctx_mgr is not None:
+        conn._sentry_sql_span_manager = None
+        ctx_mgr.__exit__(None, None, None)
+
+
+def _dbapi_error(conn, *args):
+    # type: (Any, *Any) -> None
+    span = getattr(conn, "_sentry_sql_span", None)  # type: Optional[Span]
+
+    if span is not None:
+        span.set_failure()
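+
+
+# Usage sketch (illustrative): once enabled, every cursor execution on an
+# SQLAlchemy `Engine` is recorded via `record_sql_queries` (breadcrumbs, and
+# spans when tracing is enabled).
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
+#
+#     sentry_sdk.init(integrations=[SqlalchemyIntegration()])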
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
new file mode 100644
index 0000000..032da6a
--- /dev/null
+++ b/sentry_sdk/integrations/stdlib.py
@@ -0,0 +1,224 @@
+import os
+import subprocess
+import sys
+import platform
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import EnvironHeaders, record_http_request
+from sentry_sdk.utils import capture_internal_exceptions, safe_repr
+
+
+try:
+    from httplib import HTTPConnection  # type: ignore
+except ImportError:
+    from http.client import HTTPConnection
+
+
+_RUNTIME_CONTEXT = {
+    "name": platform.python_implementation(),
+    "version": "%s.%s.%s" % (sys.version_info[:3]),
+    "build": sys.version,
+}
+
+
+class StdlibIntegration(Integration):
+    identifier = "stdlib"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _install_httplib()
+        _install_subprocess()
+
+        @add_global_event_processor
+        def add_python_runtime_context(event, hint):
+            if Hub.current.get_integration(StdlibIntegration) is not None:
+                contexts = event.setdefault("contexts", {})
+                if isinstance(contexts, dict) and "runtime" not in contexts:
+                    contexts["runtime"] = _RUNTIME_CONTEXT
+
+            return event
+
+
+def _install_httplib():
+    # type: () -> None
+    real_putrequest = HTTPConnection.putrequest
+    real_getresponse = HTTPConnection.getresponse
+
+    def putrequest(self, method, url, *args, **kwargs):
+        hub = Hub.current
+        if hub.get_integration(StdlibIntegration) is None:
+            return real_putrequest(self, method, url, *args, **kwargs)
+
+        host = self.host
+        port = self.port
+        default_port = self.default_port
+
+        real_url = url
+        if not real_url.startswith(("http://", "https://")):
+            real_url = "%s://%s%s%s" % (
+                default_port == 443 and "https" or "http",
+                host,
+                port != default_port and ":%s" % port or "",
+                url,
+            )
+
+        recorder = record_http_request(hub, real_url, method)
+        data_dict = recorder.__enter__()
+
+        try:
+            rv = real_putrequest(self, method, url, *args, **kwargs)
+
+            for key, value in hub.iter_trace_propagation_headers():
+                self.putheader(key, value)
+        except Exception:
+            recorder.__exit__(*sys.exc_info())
+            raise
+
+        self._sentrysdk_recorder = recorder
+        self._sentrysdk_data_dict = data_dict
+
+        return rv
+
+    def getresponse(self, *args, **kwargs):
+        recorder = getattr(self, "_sentrysdk_recorder", None)
+
+        if recorder is None:
+            return real_getresponse(self, *args, **kwargs)
+
+        data_dict = getattr(self, "_sentrysdk_data_dict", None)
+
+        try:
+            rv = real_getresponse(self, *args, **kwargs)
+
+            if data_dict is not None:
+                data_dict["httplib_response"] = rv
+                data_dict["status_code"] = rv.status
+                data_dict["reason"] = rv.reason
+        except TypeError:
+            # python-requests provokes a TypeError to discover py3 vs py2 differences
+            #
+            # > TypeError("getresponse() got an unexpected keyword argument 'buffering'")
+            raise
+        except Exception:
+            recorder.__exit__(*sys.exc_info())
+            raise
+        else:
+            recorder.__exit__(None, None, None)
+
+        return rv
+
+    HTTPConnection.putrequest = putrequest
+    HTTPConnection.getresponse = getresponse
+
+
+def _init_argument(args, kwargs, name, position, setdefault_callback=None):
+    """
+    Given (*args, **kwargs) of a function call, retrieve (and optionally set a
+    default for) an argument by either name or position.
+
+    This is useful for wrapping functions with complex type signatures and
+    extracting a few arguments without needing to redefine that function's
+    entire type signature.
+    """
+
+    if name in kwargs:
+        rv = kwargs[name]
+        if setdefault_callback is not None:
+            rv = setdefault_callback(rv)
+        if rv is not None:
+            kwargs[name] = rv
+    elif position < len(args):
+        rv = args[position]
+        if setdefault_callback is not None:
+            rv = setdefault_callback(rv)
+        if rv is not None:
+            args[position] = rv
+    else:
+        rv = setdefault_callback and setdefault_callback(None)
+        if rv is not None:
+            kwargs[name] = rv
+
+    return rv
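+
+
+# Worked example (illustrative values): for a call like Popen(["ls"], cwd="/tmp")
+# intercepted as a = [["ls"]], kw = {"cwd": "/tmp"}:
+#
+#     _init_argument(a, kw, "args", 0)  # -> ["ls"] (taken by position)
+#     _init_argument(a, kw, "cwd", 9)   # -> "/tmp" (taken by name)
+#     _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ))
+#     # -> a copy of os.environ, which is also stored back into kw["env"]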
+
+
+def _install_subprocess():
+    old_popen_init = subprocess.Popen.__init__
+
+    def sentry_patched_popen_init(self, *a, **kw):
+        hub = Hub.current
+        if hub.get_integration(StdlibIntegration) is None:
+            return old_popen_init(self, *a, **kw)
+
+        # Convert from tuple to list to be able to set values.
+        a = list(a)
+
+        args = _init_argument(a, kw, "args", 0) or []
+        cwd = _init_argument(a, kw, "cwd", 9)
+
+        # if args is not a list or tuple (and e.g. some iterator instead),
+        # let's not use it at all. There are too many things that can go wrong
+        # when trying to collect an iterator into a list and setting that list
+        # into `a` again.
+        #
+        # Also invocations where `args` is not a sequence are not actually
+        # legal. They just happen to work under CPython.
+        description = None
+
+        if isinstance(args, (list, tuple)) and len(args) < 100:
+            with capture_internal_exceptions():
+                description = " ".join(map(str, args))
+
+        if description is None:
+            description = safe_repr(args)
+
+        env = None
+
+        for k, v in hub.iter_trace_propagation_headers():
+            if env is None:
+                env = _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ))
+            env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
+
+        with hub.start_span(op="subprocess", description=description) as span:
+            span.set_data("subprocess.cwd", cwd)
+
+            rv = old_popen_init(self, *a, **kw)
+
+            span.set_tag("subprocess.pid", self.pid)
+            return rv
+
+    subprocess.Popen.__init__ = sentry_patched_popen_init  # type: ignore
+
+    old_popen_wait = subprocess.Popen.wait
+
+    def sentry_patched_popen_wait(self, *a, **kw):
+        hub = Hub.current
+
+        if hub.get_integration(StdlibIntegration) is None:
+            return old_popen_wait(self, *a, **kw)
+
+        with hub.start_span(op="subprocess.wait") as span:
+            span.set_tag("subprocess.pid", self.pid)
+            return old_popen_wait(self, *a, **kw)
+
+    subprocess.Popen.wait = sentry_patched_popen_wait  # type: ignore
+
+    old_popen_communicate = subprocess.Popen.communicate
+
+    def sentry_patched_popen_communicate(self, *a, **kw):
+        hub = Hub.current
+
+        if hub.get_integration(StdlibIntegration) is None:
+            return old_popen_communicate(self, *a, **kw)
+
+        with hub.start_span(op="subprocess.communicate") as span:
+            span.set_tag("subprocess.pid", self.pid)
+            return old_popen_communicate(self, *a, **kw)
+
+    subprocess.Popen.communicate = sentry_patched_popen_communicate  # type: ignore
+
+
+def get_subprocess_traceparent_headers():
+    return EnvironHeaders(os.environ, prefix="SUBPROCESS_")
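+
+
+# Sketch of the propagation mechanism: the patched `Popen.__init__` above writes
+# each trace header into the child's environment as `SUBPROCESS_<HEADER-NAME>`
+# (e.g. a `sentry-trace` header becomes `SUBPROCESS_SENTRY_TRACE`), and a child
+# process that also uses this SDK can continue the trace from those values:
+#
+#     from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers
+#     from sentry_sdk.tracing import Span
+#
+#     span = Span.continue_from_headers(get_subprocess_traceparent_headers())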
diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py
new file mode 100644
index 0000000..34503a7
--- /dev/null
+++ b/sentry_sdk/integrations/threading.py
@@ -0,0 +1,76 @@
+from __future__ import absolute_import
+
+import sys
+from threading import Thread, current_thread
+
+from sentry_sdk import Hub
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import MYPY
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import event_from_exception
+
+if MYPY:
+    from typing import Any
+
+
+class ThreadingIntegration(Integration):
+    identifier = "threading"
+
+    def __init__(self, propagate_hub=False):
+        self.propagate_hub = propagate_hub
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        old_start = Thread.start
+
+        def sentry_start(self, *a, **kw):
+            hub = Hub.current
+            integration = hub.get_integration(ThreadingIntegration)
+            if integration is not None:
+                if not integration.propagate_hub:
+                    hub_ = None
+                else:
+                    hub_ = Hub(hub)
+                # Patching instance methods in `start()` creates a reference cycle if
+                # done in a naive way. See
+                # https://github.com/getsentry/sentry-python/pull/434
+                #
+                # Instead of holding a reference to the thread object here, the
+                # wrapper below looks it up via `current_thread()` at run time,
+                # which avoids the reference cycle.
+                self.run = _wrap_run(hub_, self.run.__func__)
+
+            return old_start(self, *a, **kw)  # type: ignore
+
+        Thread.start = sentry_start  # type: ignore
+
+
+def _wrap_run(parent_hub, old_run_func):
+    def run(*a, **kw):
+        hub = parent_hub or Hub.current
+        with hub:
+            try:
+                self = current_thread()
+                return old_run_func(self, *a, **kw)
+            except Exception:
+                reraise(*_capture_exception())
+
+    return run
+
+
+def _capture_exception():
+    hub = Hub.current
+    exc_info = sys.exc_info()
+
+    if hub.get_integration(ThreadingIntegration) is not None:
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "threading", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+    return exc_info
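+
+
+# Usage sketch (illustrative): with `propagate_hub=True`, each started thread
+# runs with a fork of the hub that started it, so scope data set in the parent
+# is visible from the child; the default keeps threads on the global hub.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.threading import ThreadingIntegration
+#
+#     sentry_sdk.init(integrations=[ThreadingIntegration(propagate_hub=True)])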
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
new file mode 100644
index 0000000..eaa6806
--- /dev/null
+++ b/sentry_sdk/integrations/tornado.py
@@ -0,0 +1,197 @@
+import weakref
+from inspect import iscoroutinefunction
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+    HAS_REAL_CONTEXTVARS,
+    event_from_exception,
+    capture_internal_exceptions,
+    transaction_from_function,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import (
+    RequestExtractor,
+    _filter_headers,
+    _is_json_content_type,
+)
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._compat import iteritems
+
+from tornado.web import RequestHandler, HTTPError  # type: ignore
+from tornado.gen import coroutine  # type: ignore
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import List
+    from typing import Optional
+    from typing import Dict
+    from typing import Callable
+
+
+class TornadoIntegration(Integration):
+    identifier = "tornado"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        import tornado  # type: ignore
+
+        tornado_version = getattr(tornado, "version_info", None)
+        if tornado_version is None or tornado_version < (5, 0):
+            raise RuntimeError("Tornado 5+ required")
+
+        if not HAS_REAL_CONTEXTVARS:
+            # Tornado is async. We better have contextvars or we're going to leak
+            # state between requests.
+            raise RuntimeError(
+                "The tornado integration for Sentry requires Python 3.6+ or the aiocontextvars package"
+            )
+
+        ignore_logger("tornado.application")
+        ignore_logger("tornado.access")
+
+        old_execute = RequestHandler._execute
+
+        awaitable = iscoroutinefunction(old_execute)
+
+        if awaitable:
+            # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await)
+            # In that case our method should be a coroutine function too
+            async def sentry_execute_request_handler(self, *args, **kwargs):
+                # type: (Any, *List, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(TornadoIntegration)
+                if integration is None:
+                    return await old_execute(self, *args, **kwargs)
+
+                weak_handler = weakref.ref(self)
+
+                with Hub(hub) as hub:
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope.add_event_processor(_make_event_processor(weak_handler))
+                    return await old_execute(self, *args, **kwargs)
+
+        else:
+
+            @coroutine  # type: ignore
+            def sentry_execute_request_handler(self, *args, **kwargs):
+                hub = Hub.current
+                integration = hub.get_integration(TornadoIntegration)
+                if integration is None:
+                    return old_execute(self, *args, **kwargs)
+
+                weak_handler = weakref.ref(self)
+
+                with Hub(hub) as hub:
+                    with hub.configure_scope() as scope:
+                        scope.add_event_processor(_make_event_processor(weak_handler))
+                    result = yield from old_execute(self, *args, **kwargs)
+                    return result
+
+        RequestHandler._execute = sentry_execute_request_handler
+
+        old_log_exception = RequestHandler.log_exception
+
+        def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
+            # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
+            _capture_exception(ty, value, tb)
+            return old_log_exception(self, ty, value, tb, *args, **kwargs)
+
+        RequestHandler.log_exception = sentry_log_exception
+
+
+def _capture_exception(ty, value, tb):
+    # type: (type, BaseException, Any) -> None
+    hub = Hub.current
+    if hub.get_integration(TornadoIntegration) is None:
+        return
+    if isinstance(value, HTTPError):
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    event, hint = event_from_exception(
+        (ty, value, tb),
+        client_options=client.options,
+        mechanism={"type": "tornado", "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+
+def _make_event_processor(weak_handler):
+    # type: (Callable[[], RequestHandler]) -> Callable
+    def tornado_processor(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        handler = weak_handler()
+        if handler is None:
+            return event
+
+        request = handler.request
+
+        with capture_internal_exceptions():
+            method = getattr(handler, handler.request.method.lower())
+            event["transaction"] = transaction_from_function(method)
+
+        with capture_internal_exceptions():
+            extractor = TornadoRequestExtractor(request)
+            extractor.extract_into_event(event)
+
+            request_info = event["request"]
+
+            request_info["url"] = "%s://%s%s" % (
+                request.protocol,
+                request.host,
+                request.path,
+            )
+
+            request_info["query_string"] = request.query
+            request_info["method"] = request.method
+            request_info["env"] = {"REMOTE_ADDR": request.remote_ip}
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+        with capture_internal_exceptions():
+            if handler.current_user and _should_send_default_pii():
+                event.setdefault("user", {})["is_authenticated"] = True
+
+        return event
+
+    return tornado_processor
+
+
+class TornadoRequestExtractor(RequestExtractor):
+    def content_length(self):
+        # type: () -> int
+        if self.request.body is None:
+            return 0
+        return len(self.request.body)
+
+    def cookies(self):
+        # type: () -> Dict
+        return {k: v.value for k, v in iteritems(self.request.cookies)}
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> Optional[Any]
+        return {
+            k: [v.decode("latin1", "replace") for v in vs]
+            for k, vs in iteritems(self.request.body_arguments)
+        }
+
+    def is_json(self):
+        # type: () -> bool
+        return _is_json_content_type(self.request.headers.get("content-type"))
+
+    def files(self):
+        # type: () -> Dict
+        return {k: v[0] for k, v in iteritems(self.request.files) if v}
+
+    def size_of_file(self, file):
+        return len(file.body or ())
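+
+
+# Usage sketch (illustrative): requires Tornado 5+ and working contextvars
+# (see the checks in `setup_once` above).
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.tornado import TornadoIntegration
+#
+#     sentry_sdk.init(integrations=[TornadoIntegration()])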
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
new file mode 100644
index 0000000..0c06157
--- /dev/null
+++ b/sentry_sdk/integrations/wsgi.py
@@ -0,0 +1,290 @@
+import functools
+import sys
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+    ContextVar,
+    capture_internal_exceptions,
+    event_from_exception,
+)
+from sentry_sdk._compat import PY2, reraise, iteritems
+from sentry_sdk.tracing import Span
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Callable
+    from typing import Dict
+    from typing import List
+    from typing import Iterator
+    from typing import Any
+    from typing import Tuple
+    from typing import Optional
+    from typing import TypeVar
+
+    from sentry_sdk.utils import ExcInfo
+
+    T = TypeVar("T")
+    U = TypeVar("U")
+    E = TypeVar("E")
+
+
+_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")
+
+
+if PY2:
+
+    def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+        # type: (str, str, str) -> str
+        return s.decode(charset, errors)
+
+
+else:
+
+    def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+        # type: (str, str, str) -> str
+        return s.encode("latin1").decode(charset, errors)
+
+
+def get_host(environ):
+    # type: (Dict[str, str]) -> str
+    """Return the host for the given WSGI environment. Yanked from Werkzeug."""
+    if environ.get("HTTP_HOST"):
+        rv = environ["HTTP_HOST"]
+        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+            rv = rv[:-3]
+        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+            rv = rv[:-4]
+    elif environ.get("SERVER_NAME"):
+        rv = environ["SERVER_NAME"]
+        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
+            ("https", "443"),
+            ("http", "80"),
+        ):
+            rv += ":" + environ["SERVER_PORT"]
+    else:
+        # In spite of the WSGI spec, SERVER_NAME might not be present.
+        rv = "unknown"
+
+    return rv
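+
+# Illustrative examples for `get_host` (not part of the original module):
+#
+#   get_host({"HTTP_HOST": "example.com:443", "wsgi.url_scheme": "https"})
+#   # -> "example.com"
+#   get_host({"SERVER_NAME": "example.com", "SERVER_PORT": "8080",
+#             "wsgi.url_scheme": "http"})
+#   # -> "example.com:8080"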
+
+
+def get_request_url(environ):
+    # type: (Dict[str, str]) -> str
+    """Return the absolute URL without query string for the given WSGI
+    environment."""
+    return "%s://%s/%s" % (
+        environ.get("wsgi.url_scheme"),
+        get_host(environ),
+        wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/"),
+    )
+
+
+class SentryWsgiMiddleware(object):
+    __slots__ = ("app",)
+
+    def __init__(self, app):
+        # type: (Callable) -> None
+        self.app = app
+
+    def __call__(self, environ, start_response):
+        # type: (Dict[str, str], Callable) -> _ScopedResponse
+        if _wsgi_middleware_applied.get(False):
+            return self.app(environ, start_response)
+
+        _wsgi_middleware_applied.set(True)
+        try:
+            hub = Hub(Hub.current)
+
+            with hub:
+                with capture_internal_exceptions():
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope._name = "wsgi"
+                        scope.add_event_processor(_make_wsgi_event_processor(environ))
+
+                span = Span.continue_from_environ(environ)
+                span.op = "http.server"
+                span.transaction = "generic WSGI request"
+
+                with hub.start_span(span) as span:
+                    try:
+                        rv = self.app(
+                            environ,
+                            functools.partial(
+                                _sentry_start_response, start_response, span
+                            ),
+                        )
+                    except BaseException:
+                        reraise(*_capture_exception(hub))
+        finally:
+            _wsgi_middleware_applied.set(False)
+
+        return _ScopedResponse(hub, rv)
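+
+# Illustrative usage sketch (not part of the original module; `application`
+# stands for any WSGI callable):
+#
+#   from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+#   application = SentryWsgiMiddleware(application)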
+
+
+def _sentry_start_response(
+    old_start_response, span, status, response_headers, exc_info=None
+):
+    # type: (Callable[[str, U, Optional[E]], T], Span, str, U, Optional[E]) -> T
+    with capture_internal_exceptions():
+        status_int = int(status.split(" ", 1)[0])
+        span.set_tag("http.status_code", status_int)
+        if 500 <= status_int < 600:
+            span.set_failure()
+
+    return old_start_response(status, response_headers, exc_info)
+
+
+def _get_environ(environ):
+    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+    """
+    Returns our whitelisted environment variables.
+    """
+    keys = ["SERVER_NAME", "SERVER_PORT"]
+    if _should_send_default_pii():
+        # Make debugging of proxy setups easier; the proxy headers
+        # themselves are already part of the captured request headers.
+        keys += ["REMOTE_ADDR"]
+
+    for key in keys:
+        if key in environ:
+            yield key, environ[key]
+
+
+# `_get_headers` comes from `werkzeug.datastructures.EnvironHeaders`.
+#
+# We need this function because Django does not give us a "pure" HTTP header
+# dict. So we might as well use it for all WSGI integrations.
+def _get_headers(environ):
+    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+    """
+    Returns only proper HTTP headers.
+
+    """
+    for key, value in iteritems(environ):
+        key = str(key)
+        if key.startswith("HTTP_") and key not in (
+            "HTTP_CONTENT_TYPE",
+            "HTTP_CONTENT_LENGTH",
+        ):
+            yield key[5:].replace("_", "-").title(), value
+        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+            yield key.replace("_", "-").title(), value
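+
+# Illustrative example for `_get_headers` (not part of the original module):
+#
+#   dict(_get_headers({"HTTP_USER_AGENT": "curl", "CONTENT_TYPE": "text/plain"}))
+#   # -> {"User-Agent": "curl", "Content-Type": "text/plain"}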
+
+
+def get_client_ip(environ):
+    # type: (Dict[str, str]) -> Optional[Any]
+    """
+    Infer the user IP address from various headers. This cannot be used in
+    security-sensitive situations since the value may be forged by a client,
+    but it's good enough for the event payload.
+    """
+    try:
+        return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()
+    except (KeyError, IndexError):
+        pass
+
+    try:
+        return environ["HTTP_X_REAL_IP"]
+    except KeyError:
+        pass
+
+    return environ.get("REMOTE_ADDR")
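+
+# Illustrative example for `get_client_ip` (not part of the original module):
+# the left-most X-Forwarded-For entry wins, falling back to X-Real-IP and
+# then REMOTE_ADDR.
+#
+#   get_client_ip({"HTTP_X_FORWARDED_FOR": "203.0.113.4, 10.0.0.1",
+#                  "REMOTE_ADDR": "10.0.0.1"})
+#   # -> "203.0.113.4"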
+
+
+def _capture_exception(hub):
+    # type: (Hub) -> ExcInfo
+    exc_info = sys.exc_info()
+
+    # Check client here as it might have been unset while streaming response
+    if hub.client is not None:
+        e = exc_info[1]
+
+        # SystemExit(0) is the only uncaught exception that is expected behavior
+        should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
+        if not should_skip_capture:
+            event, hint = event_from_exception(
+                exc_info,
+                client_options=hub.client.options,
+                mechanism={"type": "wsgi", "handled": False},
+            )
+            hub.capture_event(event, hint=hint)
+
+    return exc_info
+
+
+class _ScopedResponse(object):
+    __slots__ = ("_response", "_hub")
+
+    def __init__(self, hub, response):
+        # type: (Hub, List[bytes]) -> None
+        self._hub = hub
+        self._response = response
+
+    def __iter__(self):
+        # type: () -> Iterator[bytes]
+        iterator = iter(self._response)
+
+        while True:
+            with self._hub:
+                try:
+                    chunk = next(iterator)
+                except StopIteration:
+                    break
+                except BaseException:
+                    reraise(*_capture_exception(self._hub))
+
+            yield chunk
+
+    def close(self):
+        with self._hub:
+            try:
+                self._response.close()
+            except AttributeError:
+                pass
+            except BaseException:
+                reraise(*_capture_exception(self._hub))
+
+
+def _make_wsgi_event_processor(environ):
+    # type: (Dict[str, str]) -> Callable
+    # It's a bit unfortunate that we have to extract and parse the request data
+    # from the environ so eagerly, but there are a few good reasons for this.
+    #
+    # We might be in a situation where the scope/hub never gets torn down
+    # properly. In that case we will have an unnecessary strong reference to
+    # all objects in the environ (some of which may take a lot of memory) when
+    # we're really just interested in a few of them.
+    #
+    # Keeping the environment around for longer than the request lifecycle is
+    # also not necessarily something uWSGI can deal with:
+    # https://github.com/unbit/uwsgi/issues/1950
+
+    client_ip = get_client_ip(environ)
+    request_url = get_request_url(environ)
+    query_string = environ.get("QUERY_STRING")
+    method = environ.get("REQUEST_METHOD")
+    env = dict(_get_environ(environ))
+    headers = _filter_headers(dict(_get_headers(environ)))
+
+    def event_processor(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        with capture_internal_exceptions():
+            # if the code below fails halfway through we at least have some data
+            request_info = event.setdefault("request", {})
+
+            if _should_send_default_pii():
+                user_info = event.setdefault("user", {})
+                user_info["ip_address"] = client_ip
+
+            request_info["url"] = request_url
+            request_info["query_string"] = query_string
+            request_info["method"] = method
+            request_info["env"] = env
+            request_info["headers"] = headers
+
+        return event
+
+    return event_processor
diff --git a/sentry_sdk/py.typed b/sentry_sdk/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
new file mode 100644
index 0000000..7fc5eac
--- /dev/null
+++ b/sentry_sdk/scope.py
@@ -0,0 +1,324 @@
+from copy import copy
+from collections import deque
+from functools import wraps
+from itertools import chain
+
+from sentry_sdk.utils import logger, capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Deque
+    from typing import List
+    from typing import Callable
+    from typing import TypeVar
+
+    from sentry_sdk._types import (
+        Breadcrumb,
+        Event,
+        EventProcessor,
+        ErrorProcessor,
+        Hint,
+    )
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+global_event_processors = []  # type: List[EventProcessor]
+
+
+def add_global_event_processor(processor):
+    # type: (EventProcessor) -> None
+    global_event_processors.append(processor)
+
+
+def _attr_setter(fn):
+    return property(fset=fn, doc=fn.__doc__)
+
+
+def _disable_capture(fn):
+    # type: (F) -> F
+    @wraps(fn)
+    def wrapper(self, *args, **kwargs):
+        # type: (Any, *Dict[str, Any], **Any) -> Any
+        if not self._should_capture:
+            return
+        try:
+            self._should_capture = False
+            return fn(self, *args, **kwargs)
+        finally:
+            self._should_capture = True
+
+    return wrapper  # type: ignore
+
+
+class Scope(object):
+    """The scope holds extra information that should be sent with all
+    events that belong to it.
+    """
+
+    __slots__ = (
+        "_level",
+        "_name",
+        "_fingerprint",
+        "_transaction",
+        "_user",
+        "_tags",
+        "_contexts",
+        "_extras",
+        "_breadcrumbs",
+        "_event_processors",
+        "_error_processors",
+        "_should_capture",
+        "_span",
+    )
+
+    def __init__(self):
+        # type: () -> None
+        self._event_processors = []  # type: List[EventProcessor]
+        self._error_processors = []  # type: List[ErrorProcessor]
+
+        self._name = None  # type: Optional[str]
+        self.clear()
+
+    @_attr_setter
+    def level(self, value):
+        """When set this overrides the level."""
+        self._level = value
+
+    @_attr_setter
+    def fingerprint(self, value):
+        """When set this overrides the default fingerprint."""
+        self._fingerprint = value
+
+    @_attr_setter
+    def transaction(self, value):
+        """When set this forces a specific transaction name to be set."""
+        self._transaction = value
+        if self._span:
+            self._span.transaction = value
+
+    @_attr_setter
+    def user(self, value):
+        """When set a specific user is bound to the scope."""
+        self._user = value
+
+    @property
+    def span(self):
+        """Get/set current tracing span."""
+        return self._span
+
+    @span.setter
+    def span(self, span):
+        self._span = span
+        if span is not None and span.transaction:
+            self._transaction = span.transaction
+
+    def set_tag(
+        self,
+        key,  # type: str
+        value,  # type: Any
+    ):
+        # type: (...) -> None
+        """Sets a tag for a key to a specific value."""
+        self._tags[key] = value
+
+    def remove_tag(
+        self, key  # type: str
+    ):
+        # type: (...) -> None
+        """Removes a specific tag."""
+        self._tags.pop(key, None)
+
+    def set_context(
+        self,
+        key,  # type: str
+        value,  # type: Any
+    ):
+        # type: (...) -> None
+        """Binds a context at a certain key to a specific value."""
+        self._contexts[key] = value
+
+    def remove_context(
+        self, key  # type: str
+    ):
+        # type: (...) -> None
+        """Removes a context."""
+        self._contexts.pop(key, None)
+
+    def set_extra(
+        self,
+        key,  # type: str
+        value,  # type: Any
+    ):
+        # type: (...) -> None
+        """Sets an extra key to a specific value."""
+        self._extras[key] = value
+
+    def remove_extra(
+        self, key  # type: str
+    ):
+        # type: (...) -> None
+        """Removes a specific extra key."""
+        self._extras.pop(key, None)
+
+    def clear(self):
+        # type: () -> None
+        """Clears the entire scope."""
+        self._level = None
+        self._fingerprint = None
+        self._transaction = None
+        self._user = None
+
+        self._tags = {}  # type: Dict[str, Any]
+        self._contexts = {}  # type: Dict[str, Dict[str, Any]]
+        self._extras = {}  # type: Dict[str, Any]
+
+        self.clear_breadcrumbs()
+        self._should_capture = True
+
+        self._span = None
+
+    def clear_breadcrumbs(self):
+        # type: () -> None
+        """Clears breadcrumb buffer."""
+        self._breadcrumbs = deque()  # type: Deque[Breadcrumb]
+
+    def add_event_processor(
+        self, func  # type: EventProcessor
+    ):
+        # type: (...) -> None
+        """Register a scope local event processor on the scope.
+
+        :param func: This function behaves like `before_send`.
+        """
+        if len(self._event_processors) > 20:
+            logger.warning(
+                "Too many event processors on scope! Clearing list to free up some memory: %r",
+                self._event_processors,
+            )
+            del self._event_processors[:]
+
+        self._event_processors.append(func)
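+
+    # Illustrative sketch (not part of the original source): a processor
+    # registered via `add_event_processor` receives (event, hint) and returns
+    # the event, possibly modified, or None to drop it. For example:
+    #
+    #   def tag_component(event, hint):
+    #       event.setdefault("tags", {})["component"] = "checkout"
+    #       return event
+    #
+    #   scope.add_event_processor(tag_component)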
+
+    def add_error_processor(
+        self,
+        func,  # type: ErrorProcessor
+        cls=None,  # type: Optional[type]
+    ):
+        # type: (...) -> None
+        """Register a scope local error processor on the scope.
+
+        :param func: A callback that works similarly to an event processor but is invoked with the original exception info triple as its second argument.
+
+        :param cls: Optionally, only process exceptions of this type.
+        """
+        if cls is not None:
+            cls_ = cls  # For mypy.
+            real_func = func
+
+            def func(event, exc_info):
+                try:
+                    is_inst = isinstance(exc_info[1], cls_)
+                except Exception:
+                    is_inst = False
+                if is_inst:
+                    return real_func(event, exc_info)
+                return event
+
+        self._error_processors.append(func)
+
+    @_disable_capture
+    def apply_to_event(
+        self,
+        event,  # type: Event
+        hint,  # type: Hint
+    ):
+        # type: (...) -> Optional[Event]
+        """Applies the information contained on the scope to the given event."""
+
+        def _drop(event, cause, ty):
+            # type: (Dict[str, Any], Any, str) -> Optional[Any]
+            logger.info("%s (%s) dropped event (%s)", ty, cause, event)
+            return None
+
+        if self._level is not None:
+            event["level"] = self._level
+
+        if event.get("type") != "transaction":
+            event.setdefault("breadcrumbs", []).extend(self._breadcrumbs)
+
+        if event.get("user") is None and self._user is not None:
+            event["user"] = self._user
+
+        if event.get("transaction") is None and self._transaction is not None:
+            event["transaction"] = self._transaction
+
+        if event.get("fingerprint") is None and self._fingerprint is not None:
+            event["fingerprint"] = self._fingerprint
+
+        if self._extras:
+            event.setdefault("extra", {}).update(self._extras)
+
+        if self._tags:
+            event.setdefault("tags", {}).update(self._tags)
+
+        if self._contexts:
+            event.setdefault("contexts", {}).update(self._contexts)
+
+        if self._span is not None:
+            contexts = event.setdefault("contexts", {})
+            if not contexts.get("trace"):
+                contexts["trace"] = self._span.get_trace_context()
+
+        exc_info = hint.get("exc_info")
+        if exc_info is not None:
+            for error_processor in self._error_processors:
+                new_event = error_processor(event, exc_info)
+                if new_event is None:
+                    return _drop(event, error_processor, "error processor")
+                event = new_event
+
+        for event_processor in chain(global_event_processors, self._event_processors):
+            new_event = event
+            with capture_internal_exceptions():
+                new_event = event_processor(event, hint)
+            if new_event is None:
+                return _drop(event, event_processor, "event processor")
+            event = new_event
+
+        return event
+
+    def __copy__(self):
+        # type: () -> Scope
+        rv = object.__new__(self.__class__)  # type: Scope
+
+        rv._level = self._level
+        rv._name = self._name
+        rv._fingerprint = self._fingerprint
+        rv._transaction = self._transaction
+        rv._user = self._user
+
+        rv._tags = dict(self._tags)
+        rv._contexts = dict(self._contexts)
+        rv._extras = dict(self._extras)
+
+        rv._breadcrumbs = copy(self._breadcrumbs)
+        rv._event_processors = list(self._event_processors)
+        rv._error_processors = list(self._error_processors)
+
+        rv._should_capture = self._should_capture
+        rv._span = self._span
+
+        return rv
+
+    def __repr__(self):
+        # type: () -> str
+        return "<%s id=%s name=%s>" % (
+            self.__class__.__name__,
+            hex(id(self)),
+            self._name,
+        )
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
new file mode 100644
index 0000000..feae13f
--- /dev/null
+++ b/sentry_sdk/serializer.py
@@ -0,0 +1,297 @@
+import contextlib
+
+from datetime import datetime
+
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    safe_repr,
+    strip_string,
+)
+
+from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import List
+    from typing import Optional
+    from typing import Callable
+    from typing import Union
+    from typing import Generator
+
+    # https://github.com/python/mypy/issues/5710
+    _NotImplemented = Any
+    ReprProcessor = Callable[[Any, Dict[str, Any]], Union[_NotImplemented, str]]
+    Segment = Union[str, int]
+
+
+if PY2:
+    # Importing ABCs from collections is deprecated, and will stop working in 3.8
+    # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
+    from collections import Mapping, Sequence
+else:
+    # New in 3.3
+    # https://docs.python.org/3/library/collections.abc.html
+    from collections.abc import Mapping, Sequence
+
+MAX_DATABAG_DEPTH = 5
+MAX_DATABAG_BREADTH = 10
+CYCLE_MARKER = u""
+
+
+global_repr_processors = []  # type: List[ReprProcessor]
+
+
+def add_global_repr_processor(processor):
+    # type: (ReprProcessor) -> None
+    global_repr_processors.append(processor)
+
+
+class MetaNode(object):
+    __slots__ = (
+        "_parent",
+        "_segment",
+        "_depth",
+        "_data",
+        "_is_databag",
+        "_should_repr_strings",
+    )
+
+    def __init__(self):
+        # type: () -> None
+        self._parent = None  # type: Optional[MetaNode]
+        self._segment = None  # type: Optional[Segment]
+        self._depth = 0  # type: int
+        self._data = None  # type: Optional[Dict[str, Any]]
+        self._is_databag = None  # type: Optional[bool]
+        self._should_repr_strings = None  # type: Optional[bool]
+
+    def startswith_path(self, path):
+        # type: (List[Optional[str]]) -> bool
+        if len(path) > self._depth:
+            return False
+
+        return self.is_path(path + [None] * (self._depth - len(path)))
+
+    def is_path(self, path):
+        # type: (List[Optional[str]]) -> bool
+        if len(path) != self._depth:
+            return False
+
+        cur = self
+        for segment in reversed(path):
+            if segment is not None and segment != cur._segment:
+                return False
+            assert cur._parent is not None
+            cur = cur._parent
+
+        return cur._segment is None
+
+    def enter(self, segment):
+        # type: (Segment) -> MetaNode
+        rv = MetaNode()
+        rv._parent = self
+        rv._depth = self._depth + 1
+        rv._segment = segment
+        return rv
+
+    def _create_annotations(self):
+        # type: () -> None
+        if self._data is not None:
+            return
+
+        self._data = {}
+        if self._parent is not None:
+            self._parent._create_annotations()
+            self._parent._data[str(self._segment)] = self._data  # type: ignore
+
+    def annotate(self, **meta):
+        # type: (Any) -> None
+        self._create_annotations()
+        assert self._data is not None
+        self._data.setdefault("", {}).update(meta)
+
+    def should_repr_strings(self):
+        # type: () -> bool
+        if self._should_repr_strings is None:
+            self._should_repr_strings = (
+                self.startswith_path(
+                    ["exception", "values", None, "stacktrace", "frames", None, "vars"]
+                )
+                or self.startswith_path(
+                    ["threads", "values", None, "stacktrace", "frames", None, "vars"]
+                )
+                or self.startswith_path(["stacktrace", "frames", None, "vars"])
+            )
+
+        return self._should_repr_strings
+
+    def is_databag(self):
+        # type: () -> bool
+        if self._is_databag is None:
+            self._is_databag = (
+                self.startswith_path(["request", "data"])
+                or self.startswith_path(["breadcrumbs", None])
+                or self.startswith_path(["extra"])
+                or self.startswith_path(
+                    ["exception", "values", None, "stacktrace", "frames", None, "vars"]
+                )
+                or self.startswith_path(
+                    ["threads", "values", None, "stacktrace", "frames", None, "vars"]
+                )
+                or self.startswith_path(["stacktrace", "frames", None, "vars"])
+            )
+
+        return self._is_databag
+
+
+def _flatten_annotated(obj, meta_node):
+    # type: (Any, MetaNode) -> Any
+    if isinstance(obj, AnnotatedValue):
+        meta_node.annotate(**obj.metadata)
+        obj = obj.value
+    return obj
+
+
+class Memo(object):
+    def __init__(self):
+        # type: () -> None
+        self._inner = {}  # type: Dict[int, Any]
+
+    @contextlib.contextmanager
+    def memoize(self, obj):
+        # type: (Any) -> Generator[bool, None, None]
+        if id(obj) in self._inner:
+            yield True
+        else:
+            self._inner[id(obj)] = obj
+            yield False
+
+            self._inner.pop(id(obj), None)
+
+
+class Serializer(object):
+    def __init__(self):
+        # type: () -> None
+        self.memo = Memo()
+        self.meta_node = MetaNode()
+
+    @contextlib.contextmanager
+    def enter(self, segment):
+        # type: (Segment) -> Generator[None, None, None]
+        old_node = self.meta_node
+        self.meta_node = self.meta_node.enter(segment)
+
+        try:
+            yield
+        finally:
+            self.meta_node = old_node
+
+    def serialize_event(self, obj):
+        # type: (Any) -> Dict[str, Any]
+        rv = self._serialize_node(obj)
+        if self.meta_node._data is not None:
+            rv["_meta"] = self.meta_node._data
+        return rv
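+
+    # Illustrative sketch (not part of the original source): values under
+    # databag keys such as "extra" are depth- and breadth-limited, and any
+    # trimming is reported back under the returned event's "_meta" key, e.g.
+    #
+    #   Serializer().serialize_event({"extra": {"blob": list(range(100))}})
+    #   # -> {"extra": {"blob": [...truncated...]}, "_meta": {...}}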
+
+    def _serialize_node(self, obj, max_depth=None, max_breadth=None):
+        # type: (Any, Optional[int], Optional[int]) -> Any
+        with capture_internal_exceptions():
+            with self.memo.memoize(obj) as result:
+                if result:
+                    return CYCLE_MARKER
+
+                return self._serialize_node_impl(
+                    obj, max_depth=max_depth, max_breadth=max_breadth
+                )
+
+        if self.meta_node.is_databag():
+            return u""
+
+        return None
+
+    def _serialize_node_impl(self, obj, max_depth, max_breadth):
+        # type: (Any, Optional[int], Optional[int]) -> Any
+        if max_depth is None and max_breadth is None and self.meta_node.is_databag():
+            max_depth = self.meta_node._depth + MAX_DATABAG_DEPTH
+            max_breadth = self.meta_node._depth + MAX_DATABAG_BREADTH
+
+        if max_depth is None:
+            remaining_depth = None
+        else:
+            remaining_depth = max_depth - self.meta_node._depth
+
+        obj = _flatten_annotated(obj, self.meta_node)
+
+        if remaining_depth is not None and remaining_depth <= 0:
+            self.meta_node.annotate(rem=[["!limit", "x"]])
+            if self.meta_node.is_databag():
+                return _flatten_annotated(strip_string(safe_repr(obj)), self.meta_node)
+            return None
+
+        if self.meta_node.is_databag():
+            hints = {"memo": self.memo, "remaining_depth": remaining_depth}
+            for processor in global_repr_processors:
+                with capture_internal_exceptions():
+                    result = processor(obj, hints)
+                    if result is not NotImplemented:
+                        return _flatten_annotated(result, self.meta_node)
+
+        if isinstance(obj, Mapping):
+            # Create temporary list here to avoid calling too much code that
+            # might mutate our dictionary while we're still iterating over it.
+            items = []
+            for i, (k, v) in enumerate(iteritems(obj)):
+                if max_breadth is not None and i >= max_breadth:
+                    self.meta_node.annotate(len=max_breadth)
+                    break
+
+                items.append((k, v))
+
+            rv_dict = {}  # type: Dict[Any, Any]
+            for k, v in items:
+                k = text_type(k)
+
+                with self.enter(k):
+                    v = self._serialize_node(
+                        v, max_depth=max_depth, max_breadth=max_breadth
+                    )
+                    if v is not None:
+                        rv_dict[k] = v
+
+            return rv_dict
+        elif isinstance(obj, Sequence) and not isinstance(obj, string_types):
+            rv_list = []  # type: List[Any]
+            for i, v in enumerate(obj):
+                if max_breadth is not None and i >= max_breadth:
+                    self.meta_node.annotate(len=max_breadth)
+                    break
+
+                with self.enter(i):
+                    rv_list.append(
+                        self._serialize_node(
+                            v, max_depth=max_depth, max_breadth=max_breadth
+                        )
+                    )
+
+            return rv_list
+
+        if self.meta_node.should_repr_strings():
+            obj = safe_repr(obj)
+        else:
+            if obj is None or isinstance(obj, (bool, number_types)):
+                return obj
+
+            if isinstance(obj, datetime):
+                return text_type(obj.strftime("%Y-%m-%dT%H:%M:%S.%fZ"))
+
+            if isinstance(obj, bytes):
+                obj = obj.decode("utf-8", "replace")
+
+            if not isinstance(obj, string_types):
+                obj = safe_repr(obj)
+
+        return _flatten_annotated(strip_string(obj), self.meta_node)
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
new file mode 100644
index 0000000..748c00a
--- /dev/null
+++ b/sentry_sdk/tracing.py
@@ -0,0 +1,454 @@
+import re
+import uuid
+import contextlib
+
+from datetime import datetime
+
+import sentry_sdk
+from sentry_sdk.utils import capture_internal_exceptions, logger
+from sentry_sdk._compat import PY2
+from sentry_sdk._types import MYPY
+
+if PY2:
+    from collections import Mapping
+else:
+    from collections.abc import Mapping
+
+if MYPY:
+    import typing
+
+    from typing import Generator
+    from typing import Optional
+    from typing import Any
+    from typing import Dict
+    from typing import List
+    from typing import Tuple
+
+_traceparent_header_format_re = re.compile(
+    "^[ \t]*"  # whitespace
+    "([0-9a-f]{32})?"  # trace_id
+    "-?([0-9a-f]{16})?"  # span_id
+    "-?([01])?"  # sampled
+    "[ \t]*$"  # whitespace
+)
+
+
+class EnvironHeaders(Mapping):  # type: ignore
+    def __init__(
+        self,
+        environ,  # type: typing.Mapping[str, str]
+        prefix="HTTP_",  # type: str
+    ):
+        # type: (...) -> None
+        self.environ = environ
+        self.prefix = prefix
+
+    def __getitem__(self, key):
+        # type: (str) -> Optional[Any]
+        return self.environ[self.prefix + key.replace("-", "_").upper()]
+
+    def __len__(self):
+        # type: () -> int
+        return sum(1 for _ in iter(self))
+
+    def __iter__(self):
+        # type: () -> Generator[str, None, None]
+        for k in self.environ:
+            if not isinstance(k, str):
+                continue
+
+            k = k.replace("-", "_").upper()
+            if not k.startswith(self.prefix):
+                continue
+
+            yield k[len(self.prefix) :]
+
+
+class _SpanRecorder(object):
+    __slots__ = ("maxlen", "finished_spans", "open_span_count")
+
+    def __init__(self, maxlen):
+        # type: (int) -> None
+        self.maxlen = maxlen
+        self.open_span_count = 0  # type: int
+        self.finished_spans = []  # type: List[Span]
+
+    def start_span(self, span):
+        # type: (Span) -> None
+
+        # This is just so that we don't run out of memory while recording a lot
+        # of spans. At some point we just stop and flush out the start of the
+        # trace tree (i.e. the first n spans with the smallest
+        # start_timestamp).
+        self.open_span_count += 1
+        if self.open_span_count > self.maxlen:
+            span._span_recorder = None
+
+    def finish_span(self, span):
+        # type: (Span) -> None
+        self.finished_spans.append(span)
+
+
+class Span(object):
+    __slots__ = (
+        "trace_id",
+        "span_id",
+        "parent_span_id",
+        "same_process_as_parent",
+        "sampled",
+        "transaction",
+        "op",
+        "description",
+        "start_timestamp",
+        "timestamp",
+        "_tags",
+        "_data",
+        "_span_recorder",
+        "hub",
+        "_context_manager_state",
+    )
+
+    def __init__(
+        self,
+        trace_id=None,  # type: Optional[str]
+        span_id=None,  # type: Optional[str]
+        parent_span_id=None,  # type: Optional[str]
+        same_process_as_parent=True,  # type: bool
+        sampled=None,  # type: Optional[bool]
+        transaction=None,  # type: Optional[str]
+        op=None,  # type: Optional[str]
+        description=None,  # type: Optional[str]
+        hub=None,  # type: Optional[sentry_sdk.Hub]
+    ):
+        # type: (...) -> None
+        self.trace_id = trace_id or uuid.uuid4().hex
+        self.span_id = span_id or uuid.uuid4().hex[16:]
+        self.parent_span_id = parent_span_id
+        self.same_process_as_parent = same_process_as_parent
+        self.sampled = sampled
+        self.transaction = transaction
+        self.op = op
+        self.description = description
+        self.hub = hub
+        self._tags = {}  # type: Dict[str, str]
+        self._data = {}  # type: Dict[str, Any]
+        self.start_timestamp = datetime.now()
+
+        #: End timestamp of span
+        self.timestamp = None  # type: Optional[datetime]
+
+        self._span_recorder = None  # type: Optional[_SpanRecorder]
+
+    def init_finished_spans(self, maxlen):
+        # type: (int) -> None
+        if self._span_recorder is None:
+            self._span_recorder = _SpanRecorder(maxlen)
+        self._span_recorder.start_span(self)
+
+    def __repr__(self):
+        # type: () -> str
+        return (
+            "<%s(transaction=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>"
+            % (
+                self.__class__.__name__,
+                self.transaction,
+                self.trace_id,
+                self.span_id,
+                self.parent_span_id,
+                self.sampled,
+            )
+        )
+
+    def __enter__(self):
+        # type: () -> Span
+        hub = self.hub or sentry_sdk.Hub.current
+
+        _, scope = hub._stack[-1]
+        old_span = scope.span
+        scope.span = self
+        self._context_manager_state = (hub, scope, old_span)
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        if value is not None:
+            self.set_failure()
+
+        hub, scope, old_span = self._context_manager_state
+        del self._context_manager_state
+
+        self.finish(hub)
+        scope.span = old_span
+
+    def new_span(self, **kwargs):
+        # type: (**Any) -> Span
+        rv = type(self)(
+            trace_id=self.trace_id,
+            span_id=None,
+            parent_span_id=self.span_id,
+            sampled=self.sampled,
+            **kwargs
+        )
+
+        rv._span_recorder = self._span_recorder
+        return rv
+
+    @classmethod
+    def continue_from_environ(cls, environ):
+        # type: (typing.Mapping[str, str]) -> Span
+        return cls.continue_from_headers(EnvironHeaders(environ))
+
+    @classmethod
+    def continue_from_headers(cls, headers):
+        # type: (typing.Mapping[str, str]) -> Span
+        parent = cls.from_traceparent(headers.get("sentry-trace"))
+        if parent is None:
+            return cls()
+        return parent.new_span(same_process_as_parent=False)
+
+    def iter_headers(self):
+        # type: () -> Generator[Tuple[str, str], None, None]
+        yield "sentry-trace", self.to_traceparent()
+
+    @classmethod
+    def from_traceparent(cls, traceparent):
+        # type: (Optional[str]) -> Optional[Span]
+        if not traceparent:
+            return None
+
+        if traceparent.startswith("00-") and traceparent.endswith("-00"):
+            traceparent = traceparent[3:-3]
+
+        match = _traceparent_header_format_re.match(str(traceparent))
+        if match is None:
+            return None
+
+        trace_id, span_id, sampled_str = match.groups()
+
+        if trace_id is not None:
+            trace_id = "{:032x}".format(int(trace_id, 16))
+        if span_id is not None:
+            span_id = "{:016x}".format(int(span_id, 16))
+
+        if sampled_str:
+            sampled = sampled_str != "0"  # type: Optional[bool]
+        else:
+            sampled = None
+
+        return cls(trace_id=trace_id, span_id=span_id, sampled=sampled)
+
+    def to_traceparent(self):
+        # type: () -> str
+        sampled = ""
+        if self.sampled is True:
+            sampled = "1"
+        if self.sampled is False:
+            sampled = "0"
+        return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)
+
+    def to_legacy_traceparent(self):
+        # type: () -> str
+        return "00-%s-%s-00" % (self.trace_id, self.span_id)
+
+    def set_tag(self, key, value):
+        # type: (str, Any) -> None
+        self._tags[key] = value
+
+    def set_data(self, key, value):
+        # type: (str, Any) -> None
+        self._data[key] = value
+
+    def set_failure(self):
+        # type: () -> None
+        self.set_tag("status", "failure")
+
+    def set_success(self):
+        # type: () -> None
+        self.set_tag("status", "success")
+
+    def is_success(self):
+        # type: () -> bool
+        return self._tags.get("status") in (None, "success")
+
+    def finish(self, hub=None):
+        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+        hub = hub or self.hub or sentry_sdk.Hub.current
+
+        if self.timestamp is not None:
+            # This transaction is already finished, so we should not flush it again.
+            return None
+
+        self.timestamp = datetime.now()
+
+        _maybe_create_breadcrumbs_from_span(hub, self)
+
+        if self._span_recorder is None:
+            return None
+
+        self._span_recorder.finish_span(self)
+
+        if self.transaction is None:
+            # If this has no transaction set we assume there's a parent
+            # transaction for this span that would be flushed out eventually.
+            return None
+
+        if hub.client is None:
+            # We have no client and therefore nowhere to send this transaction
+            # event.
+            return None
+
+        if not self.sampled:
+            # At this point a `sampled = None` should have already been
+            # resolved to a concrete decision. If `sampled` is `None`, it's
+            # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a
+            # non-transaction span and later decided to make it a transaction.
+            if self.sampled is None:
+                logger.warning("Discarding transaction Span without sampling decision")
+
+            return None
+
+        return hub.capture_event(
+            {
+                "type": "transaction",
+                "transaction": self.transaction,
+                "contexts": {"trace": self.get_trace_context()},
+                "timestamp": self.timestamp,
+                "start_timestamp": self.start_timestamp,
+                "spans": [
+                    s.to_json()
+                    for s in self._span_recorder.finished_spans
+                    if s is not self
+                ],
+            }
+        )
+
+    def to_json(self):
+        # type: () -> Any
+        rv = {
+            "trace_id": self.trace_id,
+            "span_id": self.span_id,
+            "parent_span_id": self.parent_span_id,
+            "same_process_as_parent": self.same_process_as_parent,
+            "transaction": self.transaction,
+            "op": self.op,
+            "description": self.description,
+            "start_timestamp": self.start_timestamp,
+            "timestamp": self.timestamp,
+            "tags": self._tags,
+            "data": self._data,
+        }
+
+        return rv
+
+    def get_trace_context(self):
+        # type: () -> Any
+        rv = {
+            "trace_id": self.trace_id,
+            "span_id": self.span_id,
+            "parent_span_id": self.parent_span_id,
+            "op": self.op,
+            "description": self.description,
+        }
+
+        if "status" in self._tags:
+            rv["status"] = self._tags["status"]
+
+        return rv
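+
+    # Illustrative usage sketch (not part of the original source; header and
+    # names are hypothetical), mirroring how SentryWsgiMiddleware and the
+    # helpers below drive this class:
+    #
+    #   span = Span.continue_from_headers(request_headers)
+    #   span.op = "http.server"
+    #   span.transaction = "GET /hello"
+    #   with hub.start_span(span):
+    #       with hub.start_span(op="db", description="SELECT 1"):
+    #           ...  # child span, recorded into the surrounding transaction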
+
+
+def _format_sql(cursor, sql):
+    # type: (Any, str) -> Optional[str]
+
+    real_sql = None
+
+    # If we're using psycopg2, it could be that we're
+    # looking at a query that uses Composed objects. Use psycopg2's mogrify
+    # function to format the query. We lose per-parameter trimming but gain
+    # accuracy in formatting.
+    try:
+        if hasattr(cursor, "mogrify"):
+            real_sql = cursor.mogrify(sql)
+            if isinstance(real_sql, bytes):
+                real_sql = real_sql.decode(cursor.connection.encoding)
+    except Exception:
+        real_sql = None
+
+    return real_sql or str(sql)
+
+
+@contextlib.contextmanager
+def record_sql_queries(
+    hub,  # type: sentry_sdk.Hub
+    cursor,  # type: Any
+    query,  # type: Any
+    params_list,  # type: Any
+    paramstyle,  # type: Optional[str]
+    executemany,  # type: bool
+):
+    # type: (...) -> Generator[Span, None, None]
+
+    # TODO: Bring back capturing of params by default
+    if hub.client and hub.client.options["_experiments"].get(
+        "record_sql_params", False
+    ):
+        if not params_list or params_list == [None]:
+            params_list = None
+
+        if paramstyle == "pyformat":
+            paramstyle = "format"
+    else:
+        params_list = None
+        paramstyle = None
+
+    query = _format_sql(cursor, query)
+
+    data = {"db.params": params_list, "db.paramstyle": paramstyle}
+    if executemany:
+        data["db.executemany"] = True
+
+    with capture_internal_exceptions():
+        hub.add_breadcrumb(message=query, category="query", data=data)
+
+    with hub.start_span(op="db", description=query) as span:
+        for k, v in data.items():
+            span.set_data(k, v)
+        yield span
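+
+# Illustrative usage sketch for `record_sql_queries` (not part of the original
+# module; assumes `hub` is the current sentry_sdk Hub and `cursor` a DB-API
+# cursor):
+#
+#   with record_sql_queries(hub, cursor, "SELECT 1", [], None, False) as span:
+#       cursor.execute("SELECT 1")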
+
+
+@contextlib.contextmanager
+def record_http_request(hub, url, method):
+    # type: (sentry_sdk.Hub, str, str) -> Generator[Dict[str, str], None, None]
+    data_dict = {"url": url, "method": method}
+
+    with hub.start_span(op="http", description="%s %s" % (url, method)) as span:
+        try:
+            yield data_dict
+        finally:
+            if span is not None:
+                if "status_code" in data_dict:
+                    span.set_tag("http.status_code", data_dict["status_code"])
+                for k, v in data_dict.items():
+                    span.set_data(k, v)
+
+
+def _maybe_create_breadcrumbs_from_span(hub, span):
+    # type: (sentry_sdk.Hub, Span) -> None
+    if span.op == "redis":
+        hub.add_breadcrumb(
+            message=span.description, type="redis", category="redis", data=span._tags
+        )
+    elif span.op == "http" and span.is_success():
+        hub.add_breadcrumb(
+            type="http",
+            category="httplib",
+            data=span._data,
+            hint={"httplib_response": span._data.pop("httplib_response", None)},
+        )
+    elif span.op == "subprocess":
+        hub.add_breadcrumb(
+            type="subprocess",
+            category="subprocess",
+            message=span.description,
+            data=span._data,
+            hint={"popen_instance": span._data.pop("popen_instance", None)},
+        )
diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py
new file mode 100644
index 0000000..b46d55e
--- /dev/null
+++ b/sentry_sdk/transport.py
@@ -0,0 +1,263 @@
+from __future__ import print_function
+
+import json
+import io
+import urllib3  # type: ignore
+import certifi
+import gzip
+
+from datetime import datetime, timedelta
+
+from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions
+from sentry_sdk.worker import BackgroundWorker
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Type
+    from typing import Any
+    from typing import Optional
+    from typing import Dict
+    from typing import Union
+    from typing import Callable
+    from urllib3.poolmanager import PoolManager  # type: ignore
+    from urllib3.poolmanager import ProxyManager
+
+    from sentry_sdk._types import Event
+
+try:
+    from urllib.request import getproxies
+except ImportError:
+    from urllib import getproxies  # type: ignore
+
+
+class Transport(object):
+    """Baseclass for all transports.
+
+    A transport is used to send an event to sentry.
+    """
+
+    parsed_dsn = None  # type: Optional[Dsn]
+
+    def __init__(
+        self, options=None  # type: Optional[Dict[str, Any]]
+    ):
+        # type: (...) -> None
+        self.options = options
+        if options and options["dsn"] is not None and options["dsn"]:
+            self.parsed_dsn = Dsn(options["dsn"])
+        else:
+            self.parsed_dsn = None
+
+    def capture_event(
+        self, event  # type: Event
+    ):
+        # type: (...) -> None
+        """This gets invoked with the event dictionary when an event should
+        be sent to Sentry.
+        """
+        raise NotImplementedError()
+
+    def flush(
+        self,
+        timeout,  # type: float
+        callback=None,  # type: Optional[Any]
+    ):
+        # type: (...) -> None
+        """Wait `timeout` seconds for the current events to be sent out."""
+        pass
+
+    def kill(self):
+        # type: () -> None
+        """Forcefully kills the transport."""
+        pass
+
+    def __del__(self):
+        # type: () -> None
+        try:
+            self.kill()
+        except Exception:
+            pass
+
+
+class HttpTransport(Transport):
+    """The default HTTP transport."""
+
+    def __init__(
+        self, options  # type: Dict[str, Any]
+    ):
+        # type: (...) -> None
+        from sentry_sdk.consts import VERSION
+
+        Transport.__init__(self, options)
+        assert self.parsed_dsn is not None
+        self._worker = BackgroundWorker()
+        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
+        self._disabled_until = None  # type: Optional[datetime]
+        self._retry = urllib3.util.Retry()
+        self.options = options
+
+        self._pool = self._make_pool(
+            self.parsed_dsn,
+            http_proxy=options["http_proxy"],
+            https_proxy=options["https_proxy"],
+            ca_certs=options["ca_certs"],
+        )
+
+        from sentry_sdk import Hub
+
+        self.hub_cls = Hub
+
+    def _send_event(
+        self, event  # type: Event
+    ):
+        # type: (...) -> None
+        if self._disabled_until is not None:
+            if datetime.utcnow() < self._disabled_until:
+                return
+            self._disabled_until = None
+
+        body = io.BytesIO()
+        with gzip.GzipFile(fileobj=body, mode="w") as f:
+            f.write(json.dumps(event, allow_nan=False).encode("utf-8"))
+
+        assert self.parsed_dsn is not None
+        logger.debug(
+            "Sending event, type:%s level:%s event_id:%s project:%s host:%s"
+            % (
+                event.get("type") or "null",
+                event.get("level") or "null",
+                event.get("event_id") or "null",
+                self.parsed_dsn.project_id,
+                self.parsed_dsn.host,
+            )
+        )
+        response = self._pool.request(
+            "POST",
+            str(self._auth.store_api_url),
+            body=body.getvalue(),
+            headers={
+                "User-Agent": str(self._auth.client),
+                "X-Sentry-Auth": str(self._auth.to_header()),
+                "Content-Type": "application/json",
+                "Content-Encoding": "gzip",
+            },
+        )
+
+        try:
+            if response.status == 429:
+                self._disabled_until = datetime.utcnow() + timedelta(
+                    seconds=self._retry.get_retry_after(response) or 60
+                )
+                return
+
+            elif response.status >= 300 or response.status < 200:
+                logger.error(
+                    "Unexpected status code: %s (body: %s)",
+                    response.status,
+                    response.data,
+                )
+        finally:
+            response.close()
+
+        self._disabled_until = None
+
+    def _get_pool_options(self, ca_certs):
+        # type: (Optional[Any]) -> Dict[str, Any]
+        return {
+            "num_pools": 2,
+            "cert_reqs": "CERT_REQUIRED",
+            "ca_certs": ca_certs or certifi.where(),
+        }
+
+    def _make_pool(
+        self,
+        parsed_dsn,  # type: Dsn
+        http_proxy,  # type: Optional[str]
+        https_proxy,  # type: Optional[str]
+        ca_certs,  # type: Optional[Any]
+    ):
+        # type: (...) -> Union[PoolManager, ProxyManager]
+        proxy = None
+
+        # try HTTPS first
+        if parsed_dsn.scheme == "https" and (https_proxy != ""):
+            proxy = https_proxy or getproxies().get("https")
+
+        # maybe fallback to HTTP proxy
+        if not proxy and (http_proxy != ""):
+            proxy = http_proxy or getproxies().get("http")
+
+        opts = self._get_pool_options(ca_certs)
+
+        if proxy:
+            return urllib3.ProxyManager(proxy, **opts)
+        else:
+            return urllib3.PoolManager(**opts)
+
+    def capture_event(
+        self, event  # type: Event
+    ):
+        # type: (...) -> None
+        hub = self.hub_cls.current
+
+        def send_event_wrapper():
+            # type: () -> None
+            with hub:
+                with capture_internal_exceptions():
+                    self._send_event(event)
+
+        self._worker.submit(send_event_wrapper)
+
+    def flush(
+        self,
+        timeout,  # type: float
+        callback=None,  # type: Optional[Any]
+    ):
+        # type: (...) -> None
+        logger.debug("Flushing HTTP transport")
+        if timeout > 0:
+            self._worker.flush(timeout, callback)
+
+    def kill(self):
+        # type: () -> None
+        logger.debug("Killing HTTP transport")
+        self._worker.kill()
+
+
+class _FunctionTransport(Transport):
+    def __init__(
+        self, func  # type: Callable[[Event], None]
+    ):
+        # type: (...) -> None
+        Transport.__init__(self)
+        self._func = func
+
+    def capture_event(
+        self, event  # type: Event
+    ):
+        # type: (...) -> None
+        self._func(event)
+        return None
+
+
+def make_transport(options):
+    # type: (Dict[str, Any]) -> Optional[Transport]
+    ref_transport = options["transport"]
+
+    # If no transport is given, we use the http transport class
+    if ref_transport is None:
+        transport_cls = HttpTransport  # type: Type[Transport]
+    elif isinstance(ref_transport, Transport):
+        return ref_transport
+    elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
+        transport_cls = ref_transport
+    elif callable(ref_transport):
+        return _FunctionTransport(ref_transport)  # type: ignore
+
+    # If a transport class is given, only instantiate it if the DSN is not
+    # empty or None.
+    if options["dsn"]:
+        return transport_cls(options)
+
+    return None
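+
+# Illustrative sketch (not part of the original module): given the full client
+# options dict, `make_transport` returns
+#   - the instance itself when options["transport"] is a Transport instance,
+#   - transport_cls(options) for a Transport subclass (HttpTransport when
+#     options["transport"] is None), as long as options["dsn"] is non-empty,
+#   - a _FunctionTransport wrapping options["transport"] when it is a plain
+#     callable such as `lambda event: print(event)`.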
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
new file mode 100644
index 0000000..b9ba6c3
--- /dev/null
+++ b/sentry_sdk/utils.py
@@ -0,0 +1,770 @@
+import os
+import sys
+import linecache
+import logging
+
+from contextlib import contextmanager
+from datetime import datetime
+
+from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Iterator
+    from typing import List
+    from typing import Optional
+    from typing import Set
+    from typing import Tuple
+    from typing import Union
+    from types import FrameType
+    from types import TracebackType
+
+    import sentry_sdk
+
+    from sentry_sdk._types import ExcInfo
+
+epoch = datetime(1970, 1, 1)
+
+
+# The logger is created here but initialized in the debug support module
+logger = logging.getLogger("sentry_sdk.errors")
+
+MAX_STRING_LENGTH = 512
+MAX_FORMAT_PARAM_LENGTH = 128
+
+
+def _get_debug_hub():
+    # type: () -> Optional[sentry_sdk.Hub]
+    # This function is replaced by debug.py
+    pass
+
+
+@contextmanager
+def capture_internal_exceptions():
+    # type: () -> Iterator
+    try:
+        yield
+    except Exception:
+        hub = _get_debug_hub()
+        if hub is not None:
+            hub._capture_internal_exception(sys.exc_info())
+
+
+def to_timestamp(value):
+    # type: (datetime) -> float
+    return (value - epoch).total_seconds()
+
+
+def event_hint_with_exc_info(exc_info=None):
+    # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]]
+    """Creates a hint with the exc info filled in."""
+    if exc_info is None:
+        exc_info = sys.exc_info()
+    else:
+        exc_info = exc_info_from_error(exc_info)
+    if exc_info[0] is None:
+        exc_info = None
+    return {"exc_info": exc_info}
+
+
+class BadDsn(ValueError):
+    """Raised on invalid DSNs."""
+
+
+@implements_str
+class Dsn(object):
+    """Represents a DSN."""
+
+    def __init__(self, value):
+        # type: (Union[Dsn, str]) -> None
+        if isinstance(value, Dsn):
+            self.__dict__ = dict(value.__dict__)
+            return
+        parts = urlparse.urlsplit(text_type(value))
+        if parts.scheme not in (u"http", u"https"):
+            raise BadDsn("Unsupported scheme %r" % parts.scheme)
+        self.scheme = parts.scheme
+        self.host = parts.hostname
+        self.port = parts.port
+        if self.port is None:
+            self.port = self.scheme == "https" and 443 or 80
+        self.public_key = parts.username
+        if not self.public_key:
+            raise BadDsn("Missing public key")
+        self.secret_key = parts.password
+
+        path = parts.path.rsplit("/", 1)
+
+        try:
+            self.project_id = text_type(int(path.pop()))
+        except (ValueError, TypeError):
+            raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])
+
+        self.path = "/".join(path) + "/"
+
+    @property
+    def netloc(self):
+        # type: () -> str
+        """The netloc part of a DSN."""
+        rv = self.host
+        if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
+            rv = "%s:%s" % (rv, self.port)
+        return rv
+
+    def to_auth(self, client=None):
+        # type: (Optional[Any]) -> Auth
+        """Returns the auth info object for this dsn."""
+        return Auth(
+            scheme=self.scheme,
+            host=self.netloc,
+            path=self.path,
+            project_id=self.project_id,
+            public_key=self.public_key,
+            secret_key=self.secret_key,
+            client=client,
+        )
+
+    def __str__(self):
+        # type: () -> str
+        return "%s://%s%s@%s%s%s" % (
+            self.scheme,
+            self.public_key,
+            self.secret_key and "@" + self.secret_key or "",
+            self.netloc,
+            self.path,
+            self.project_id,
+        )
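+
+    # Illustrative example (placeholder values):
+    # Dsn("https://publickey@sentry.example.com/42") yields scheme="https",
+    # public_key="publickey", host="sentry.example.com", port=443, path="/"
+    # and project_id="42".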
+
+
+class Auth(object):
+    """Helper object that represents the auth info."""
+
+    def __init__(
+        self,
+        scheme,
+        host,
+        project_id,
+        public_key,
+        secret_key=None,
+        version=7,
+        client=None,
+        path="/",
+    ):
+        # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
+        self.scheme = scheme
+        self.host = host
+        self.path = path
+        self.project_id = project_id
+        self.public_key = public_key
+        self.secret_key = secret_key
+        self.version = version
+        self.client = client
+
+    @property
+    def store_api_url(self):
+        # type: () -> str
+        """Returns the API url for storing events."""
+        return "%s://%s%sapi/%s/store/" % (
+            self.scheme,
+            self.host,
+            self.path,
+            self.project_id,
+        )
+
+    def to_header(self, timestamp=None):
+        # type: (Optional[datetime]) -> str
+        """Returns the auth header a string."""
+        rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
+        if timestamp is not None:
+            rv.append(("sentry_timestamp", str(to_timestamp(timestamp))))
+        if self.client is not None:
+            rv.append(("sentry_client", self.client))
+        if self.secret_key is not None:
+            rv.append(("sentry_secret", self.secret_key))
+        return u"Sentry " + u", ".join("%s=%s" % (key, value) for key, value in rv)
+
+
+class AnnotatedValue(object):
+    def __init__(self, value, metadata):
+        # type: (Optional[Any], Dict[str, Any]) -> None
+        self.value = value
+        self.metadata = metadata
+
+
+if MYPY:
+    from typing import TypeVar
+
+    T = TypeVar("T")
+    Annotated = Union[AnnotatedValue, T]
+
+
+def get_type_name(cls):
+    # type: (Optional[type]) -> Optional[str]
+    return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None)
+
+
+def get_type_module(cls):
+    # type: (Optional[type]) -> Optional[str]
+    mod = getattr(cls, "__module__", None)
+    if mod not in (None, "builtins", "__builtins__"):
+        return mod
+    return None
+
+
+def should_hide_frame(frame):
+    # type: (FrameType) -> bool
+    try:
+        mod = frame.f_globals["__name__"]
+        if mod.startswith("sentry_sdk."):
+            return True
+    except (AttributeError, KeyError):
+        pass
+
+    for flag_name in "__traceback_hide__", "__tracebackhide__":
+        try:
+            if frame.f_locals[flag_name]:
+                return True
+        except Exception:
+            pass
+
+    return False
+
+
+def iter_stacks(tb):
+    # type: (Optional[TracebackType]) -> Iterator[TracebackType]
+    tb_ = tb  # type: Optional[TracebackType]
+    while tb_ is not None:
+        if not should_hide_frame(tb_.tb_frame):
+            yield tb_
+        tb_ = tb_.tb_next
+
+
+def get_lines_from_file(
+    filename,  # type: str
+    lineno,  # type: int
+    loader=None,  # type: Optional[Any]
+    module=None,  # type: Optional[str]
+):
+    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
+    context_lines = 5
+    source = None
+    if loader is not None and hasattr(loader, "get_source"):
+        try:
+            source_str = loader.get_source(module)  # type: Optional[str]
+        except (ImportError, IOError):
+            source_str = None
+        if source_str is not None:
+            source = source_str.splitlines()
+
+    if source is None:
+        try:
+            source = linecache.getlines(filename)
+        except (OSError, IOError):
+            return [], None, []
+
+    if not source:
+        return [], None, []
+
+    lower_bound = max(0, lineno - context_lines)
+    upper_bound = min(lineno + 1 + context_lines, len(source))
+
+    try:
+        pre_context = [
+            strip_string(line.strip("\r\n")) for line in source[lower_bound:lineno]
+        ]
+        context_line = strip_string(source[lineno].strip("\r\n"))
+        post_context = [
+            strip_string(line.strip("\r\n"))
+            for line in source[(lineno + 1) : upper_bound]
+        ]
+        return pre_context, context_line, post_context
+    except IndexError:
+        # the file may have changed since it was loaded into memory
+        return [], None, []
+
+
+def get_source_context(
+    frame,  # type: FrameType
+    tb_lineno,  # type: int
+):
+    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
+    try:
+        abs_path = frame.f_code.co_filename  # type: Optional[str]
+    except Exception:
+        abs_path = None
+    try:
+        module = frame.f_globals["__name__"]
+    except Exception:
+        return [], None, []
+    try:
+        loader = frame.f_globals["__loader__"]
+    except Exception:
+        loader = None
+    lineno = tb_lineno - 1
+    if lineno is not None and abs_path:
+        return get_lines_from_file(abs_path, lineno, loader, module)
+    return [], None, []
+
+
+def safe_str(value):
+    # type: (Any) -> str
+    try:
+        return text_type(value)
+    except Exception:
+        return safe_repr(value)
+
+
+def safe_repr(value):
+    # type: (Any) -> str
+    try:
+        rv = repr(value)
+        if isinstance(rv, bytes):
+            rv = rv.decode("utf-8", "replace")
+
+        # At this point `rv` contains a bunch of literal escape codes, like
+        # this (exaggerated example):
+        #
+        # u"\\x2f"
+        #
+        # But we want to show this string as:
+        #
+        # u"/"
+        try:
+            # unicode-escape does this job, but can only decode latin1. So we
+            # attempt to encode in latin1.
+            return rv.encode("latin1").decode("unicode-escape")
+        except Exception:
+            # Since usually strings aren't latin1 this can break. In those
+            # cases we just give up.
+            return rv
+    except Exception:
+        # If e.g. the call to `repr` already fails
+        return u""
+
+
+def filename_for_module(module, abs_path):
+    # type: (Optional[str], Optional[str]) -> Optional[str]
+    if not abs_path or not module:
+        return abs_path
+
+    try:
+        if abs_path.endswith(".pyc"):
+            abs_path = abs_path[:-1]
+
+        base_module = module.split(".", 1)[0]
+        if base_module == module:
+            return os.path.basename(abs_path)
+
+        base_module_path = sys.modules[base_module].__file__
+        return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
+            os.sep
+        )
+    except Exception:
+        return abs_path
+
+
+def serialize_frame(frame, tb_lineno=None, with_locals=True):
+    # type: (FrameType, Optional[int], bool) -> Dict[str, Any]
+    f_code = getattr(frame, "f_code", None)
+    if not f_code:
+        abs_path = None
+        function = None
+    else:
+        abs_path = frame.f_code.co_filename
+        function = frame.f_code.co_name
+    try:
+        module = frame.f_globals["__name__"]
+    except Exception:
+        module = None
+
+    if tb_lineno is None:
+        tb_lineno = frame.f_lineno
+
+    pre_context, context_line, post_context = get_source_context(frame, tb_lineno)
+
+    rv = {
+        "filename": filename_for_module(module, abs_path) or None,
+        "abs_path": os.path.abspath(abs_path) if abs_path else None,
+        "function": function or "",
+        "module": module,
+        "lineno": tb_lineno,
+        "pre_context": pre_context,
+        "context_line": context_line,
+        "post_context": post_context,
+    }  # type: Dict[str, Any]
+    if with_locals:
+        rv["vars"] = frame.f_locals
+    return rv
+
+
+def stacktrace_from_traceback(tb=None, with_locals=True):
+    # type: (Optional[TracebackType], bool) -> Dict[str, List[Dict[str, Any]]]
+    return {
+        "frames": [
+            serialize_frame(
+                tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals
+            )
+            for tb in iter_stacks(tb)
+        ]
+    }
+
+
+def current_stacktrace(with_locals=True):
+    # type: (bool) -> Any
+    __tracebackhide__ = True
+    frames = []
+
+    f = sys._getframe()
+    while f is not None:
+        if not should_hide_frame(f):
+            frames.append(serialize_frame(f, with_locals=with_locals))
+        f = f.f_back
+
+    frames.reverse()
+
+    return {"frames": frames}
+
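Editor's sketch of `current_stacktrace` usage (assumes it runs outside `sentry_sdk` itself, so only the helper's own frame is hidden via `__tracebackhide__`):

    from sentry_sdk.utils import current_stacktrace

    def outer():
        # The helper hides its own frame; callers of outer() are included.
        return current_stacktrace(with_locals=False)

    frames = outer()["frames"]
    # Frames are ordered oldest-first, so the innermost caller comes last.
    assert frames[-1]["function"] == "outer"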
+
+def get_errno(exc_value):
+    # type: (BaseException) -> Optional[Any]
+    return getattr(exc_value, "errno", None)
+
+
+def single_exception_from_error_tuple(
+    exc_type,  # type: Optional[type]
+    exc_value,  # type: Optional[BaseException]
+    tb,  # type: Optional[TracebackType]
+    client_options=None,  # type: Optional[dict]
+    mechanism=None,  # type: Optional[Dict[str, Any]]
+):
+    # type: (...) -> Dict[str, Any]
+    if exc_value is not None:
+        errno = get_errno(exc_value)
+    else:
+        errno = None
+
+    if errno is not None:
+        mechanism = mechanism or {}
+        mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
+            "number", errno
+        )
+
+    if client_options is None:
+        with_locals = True
+    else:
+        with_locals = client_options["with_locals"]
+
+    return {
+        "module": get_type_module(exc_type),
+        "type": get_type_name(exc_type),
+        "value": safe_str(exc_value),
+        "mechanism": mechanism,
+        "stacktrace": stacktrace_from_traceback(tb, with_locals),
+    }
+
+
+HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
+
+if HAS_CHAINED_EXCEPTIONS:
+
+    def walk_exception_chain(exc_info):
+        # type: (ExcInfo) -> Iterator[ExcInfo]
+        exc_type, exc_value, tb = exc_info
+
+        seen_exceptions = []
+        seen_exception_ids = set()  # type: Set[int]
+
+        while (
+            exc_type is not None
+            and exc_value is not None
+            and id(exc_value) not in seen_exception_ids
+        ):
+            yield exc_type, exc_value, tb
+
+            # Avoid hashing random types we don't know anything
+            # about. Use the list to keep a ref so that the `id` is
+            # not used for another object.
+            seen_exceptions.append(exc_value)
+            seen_exception_ids.add(id(exc_value))
+
+            if exc_value.__suppress_context__:
+                cause = exc_value.__cause__
+            else:
+                cause = exc_value.__context__
+            if cause is None:
+                break
+            exc_type = type(cause)
+            exc_value = cause
+            tb = getattr(cause, "__traceback__", None)
+
+
+else:
+
+    def walk_exception_chain(exc_info):
+        # type: (ExcInfo) -> Iterator[ExcInfo]
+        yield exc_info
+
+
+def exceptions_from_error_tuple(
+    exc_info,  # type: ExcInfo
+    client_options=None,  # type: Optional[dict]
+    mechanism=None,  # type: Optional[Dict[str, Any]]
+):
+    # type: (...) -> List[Dict[str, Any]]
+    exc_type, exc_value, tb = exc_info
+    rv = []
+    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+        rv.append(
+            single_exception_from_error_tuple(
+                exc_type, exc_value, tb, client_options, mechanism
+            )
+        )
+
+    rv.reverse()
+
+    return rv
+
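Editor's sketch of the ordering produced above, using Python 3 exception chaining (the exception messages are arbitrary):

    import sys
    from sentry_sdk.utils import exceptions_from_error_tuple

    try:
        try:
            raise KeyError("original")
        except KeyError as e:
            raise ValueError("wrapper") from e
    except ValueError:
        exceptions = exceptions_from_error_tuple(sys.exc_info())

    # walk_exception_chain yields newest-first; the final reverse() puts the
    # oldest exception first, which is the order the Sentry event expects.
    assert [e["type"] for e in exceptions] == ["KeyError", "ValueError"]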
+
+def to_string(value):
+    # type: (str) -> str
+    try:
+        return text_type(value)
+    except UnicodeDecodeError:
+        return repr(value)[1:-1]
+
+
+def iter_event_stacktraces(event):
+    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+    if "stacktrace" in event:
+        yield event["stacktrace"]
+    if "threads" in event:
+        for thread in event["threads"].get("values") or ():
+            if "stacktrace" in thread:
+                yield thread["stacktrace"]
+    if "exception" in event:
+        for exception in event["exception"].get("values") or ():
+            if "stacktrace" in exception:
+                yield exception["stacktrace"]
+
+
+def iter_event_frames(event):
+    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+    for stacktrace in iter_event_stacktraces(event):
+        for frame in stacktrace.get("frames") or ():
+            yield frame
+
+
+def handle_in_app(event, in_app_exclude=None, in_app_include=None):
+    # type: (Dict[str, Any], Optional[List], Optional[List]) -> Dict[str, Any]
+    for stacktrace in iter_event_stacktraces(event):
+        handle_in_app_impl(
+            stacktrace.get("frames"),
+            in_app_exclude=in_app_exclude,
+            in_app_include=in_app_include,
+        )
+
+    return event
+
+
+def handle_in_app_impl(frames, in_app_exclude, in_app_include):
+    # type: (Any, Optional[List], Optional[List]) -> Optional[Any]
+    if not frames:
+        return None
+
+    any_in_app = False
+    for frame in frames:
+        in_app = frame.get("in_app")
+        if in_app is not None:
+            if in_app:
+                any_in_app = True
+            continue
+
+        module = frame.get("module")
+        if not module:
+            continue
+        elif _module_in_set(module, in_app_include):
+            frame["in_app"] = True
+            any_in_app = True
+        elif _module_in_set(module, in_app_exclude):
+            frame["in_app"] = False
+
+    if not any_in_app:
+        for frame in frames:
+            if frame.get("in_app") is None:
+                frame["in_app"] = True
+
+    return frames
+
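Editor's sketch of the in_app marking rules in `handle_in_app_impl` (module names are made up):

    from sentry_sdk.utils import handle_in_app_impl

    frames = [{"module": "django.core.handlers"}, {"module": "myapp.views"}]
    handle_in_app_impl(frames, in_app_exclude=["django"], in_app_include=None)

    assert frames[0]["in_app"] is False  # excluded by prefix match
    # No frame was explicitly in_app, so the fallback marks the remaining ones.
    assert frames[1]["in_app"] is True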
+
+def exc_info_from_error(error):
+    # type: (Union[BaseException, ExcInfo]) -> ExcInfo
+    if isinstance(error, tuple) and len(error) == 3:
+        exc_type, exc_value, tb = error
+    elif isinstance(error, BaseException):
+        tb = getattr(error, "__traceback__", None)
+        if tb is not None:
+            exc_type = type(error)
+            exc_value = error
+        else:
+            exc_type, exc_value, tb = sys.exc_info()
+            if exc_value is not error:
+                tb = None
+                exc_value = error
+                exc_type = type(error)
+
+    else:
+        raise ValueError()
+
+    return exc_type, exc_value, tb
+
+
+def event_from_exception(
+    exc_info,  # type: Union[BaseException, ExcInfo]
+    client_options=None,  # type: Optional[dict]
+    mechanism=None,  # type: Optional[Dict[str, Any]]
+):
+    # type: (...) -> Tuple[Dict[str, Any], Dict[str, Any]]
+    exc_info = exc_info_from_error(exc_info)
+    hint = event_hint_with_exc_info(exc_info)
+    return (
+        {
+            "level": "error",
+            "exception": {
+                "values": exceptions_from_error_tuple(
+                    exc_info, client_options, mechanism
+                )
+            },
+        },
+        hint,
+    )
+
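Editor's sketch of `event_from_exception` on a caught exception (the mechanism dict is illustrative):

    from sentry_sdk.utils import event_from_exception

    try:
        1 / 0
    except ZeroDivisionError as e:
        event, hint = event_from_exception(
            e, mechanism={"type": "example", "handled": True}
        )

    assert event["level"] == "error"
    assert event["exception"]["values"][0]["type"] == "ZeroDivisionError"
    # The hint carries the original exc_info for before_send hooks.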
+
+def _module_in_set(name, set):
+    # type: (str, Optional[List]) -> bool
+    if not set:
+        return False
+    for item in set or ():
+        if item == name or name.startswith(item + "."):
+            return True
+    return False
+
+
+def strip_string(value, max_length=None):
+    # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
+    # TODO: read max_length from config
+    if not value:
+        return value
+
+    if max_length is None:
+        # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
+        max_length = MAX_STRING_LENGTH
+
+    length = len(value)
+
+    if length > max_length:
+        return AnnotatedValue(
+            value=value[: max_length - 3] + u"...",
+            metadata={
+                "len": length,
+                "rem": [["!limit", "x", max_length - 3, max_length]],
+            },
+        )
+    return value
+
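Editor's sketch of what `strip_string` returns for an overlong value (the limit of 512 is passed explicitly here rather than relying on `MAX_STRING_LENGTH`):

    from sentry_sdk.utils import strip_string

    assert strip_string("hello", max_length=512) == "hello"

    stripped = strip_string("x" * 600, max_length=512)
    assert len(stripped.value) == 512 and stripped.value.endswith("...")
    assert stripped.metadata == {"len": 600, "rem": [["!limit", "x", 509, 512]]}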
+
+def _is_threading_local_monkey_patched():
+    # type: () -> bool
+    try:
+        from gevent.monkey import is_object_patched  # type: ignore
+
+        if is_object_patched("threading", "local"):
+            return True
+    except ImportError:
+        pass
+
+    try:
+        from eventlet.patcher import is_monkey_patched  # type: ignore
+
+        if is_monkey_patched("thread"):
+            return True
+    except ImportError:
+        pass
+
+    return False
+
+
+def _get_contextvars():
+    # type: () -> Tuple[bool, Any]
+    """
+    Try to import contextvars and use it if it's deemed safe. We should not use
+    contextvars if gevent or eventlet have patched thread locals, as
+    contextvars are unaffected by that patch.
+
+    https://github.com/gevent/gevent/issues/1407
+    """
+    if not _is_threading_local_monkey_patched():
+        try:
+            from contextvars import ContextVar  # type: ignore
+
+            if not PY2 and sys.version_info < (3, 7):
+                import aiocontextvars  # type: ignore  # noqa
+
+            return True, ContextVar
+        except ImportError:
+            pass
+
+    from threading import local
+
+    class ContextVar(object):  # type: ignore
+        # Super-limited impl of ContextVar
+
+        def __init__(self, name):
+            self._name = name
+            self._local = local()
+
+        def get(self, default):
+            return getattr(self._local, "value", default)
+
+        def set(self, value):
+            setattr(self._local, "value", value)
+
+    return False, ContextVar
+
+
+HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
+
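Editor's note: both the real `ContextVar` and the thread-local fallback above expose the same minimal get/set protocol, e.g. (the variable name is illustrative):

    from sentry_sdk.utils import ContextVar

    _current_hub = ContextVar("current_hub")
    assert _current_hub.get(None) is None
    _current_hub.set("some hub")
    assert _current_hub.get(None) == "some hub"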
+
+def transaction_from_function(func):
+    # type: (Callable[..., Any]) -> Optional[str]
+    # Methods in Python 2
+    try:
+        return "%s.%s.%s" % (
+            func.im_class.__module__,  # type: ignore
+            func.im_class.__name__,  # type: ignore
+            func.__name__,
+        )
+    except Exception:
+        pass
+
+    func_qualname = (
+        getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
+    )  # type: Optional[str]
+
+    if not func_qualname:
+        # No idea what it is
+        return None
+
+    # Methods in Python 3
+    # Functions
+    # Classes
+    try:
+        return "%s.%s" % (func.__module__, func_qualname)
+    except Exception:
+        pass
+
+    # Possibly a lambda
+    return func_qualname
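Editor's sketch of typical `transaction_from_function` outputs under Python 3, assuming the snippet runs as a script (so `__module__` is `__main__`):

    from sentry_sdk.utils import transaction_from_function

    def view(request):
        return "ok"

    class Handler:
        def get(self):
            return "ok"

    assert transaction_from_function(view) == "__main__.view"
    assert transaction_from_function(Handler.get) == "__main__.Handler.get"
    assert transaction_from_function(lambda: None) == "__main__.<lambda>"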
diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py
new file mode 100644
index 0000000..304a77f
--- /dev/null
+++ b/sentry_sdk/worker.py
@@ -0,0 +1,132 @@
+import os
+
+from threading import Thread, Lock
+from time import sleep, time
+from sentry_sdk._compat import queue, check_thread_support
+from sentry_sdk.utils import logger
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from queue import Queue
+    from typing import Any
+    from typing import Optional
+    from typing import Callable
+
+
+_TERMINATOR = object()
+
+
+class BackgroundWorker(object):
+    def __init__(self):
+        # type: () -> None
+        check_thread_support()
+        self._queue = queue.Queue(-1)  # type: Queue[Any]
+        self._lock = Lock()
+        self._thread = None  # type: Optional[Thread]
+        self._thread_for_pid = None  # type: Optional[int]
+
+    @property
+    def is_alive(self):
+        # type: () -> bool
+        if self._thread_for_pid != os.getpid():
+            return False
+        if not self._thread:
+            return False
+        return self._thread.is_alive()
+
+    def _ensure_thread(self):
+        # type: () -> None
+        if not self.is_alive:
+            self.start()
+
+    def _timed_queue_join(self, timeout):
+        # type: (float) -> bool
+        deadline = time() + timeout
+        queue = self._queue
+
+        real_all_tasks_done = getattr(
+            queue, "all_tasks_done", None
+        )  # type: Optional[Any]
+        if real_all_tasks_done is not None:
+            real_all_tasks_done.acquire()
+            all_tasks_done = real_all_tasks_done  # type: Optional[Any]
+        elif queue.__module__.startswith("eventlet."):
+            all_tasks_done = getattr(queue, "_cond", None)
+        else:
+            all_tasks_done = None
+
+        try:
+            while queue.unfinished_tasks:  # type: ignore
+                delay = deadline - time()
+                if delay <= 0:
+                    return False
+                if all_tasks_done is not None:
+                    all_tasks_done.wait(timeout=delay)
+                else:
+                    # worst case, we just poll the number of remaining tasks
+                    sleep(0.1)
+
+            return True
+        finally:
+            if real_all_tasks_done is not None:
+                real_all_tasks_done.release()  # type: ignore
+
+    def start(self):
+        # type: () -> None
+        with self._lock:
+            if not self.is_alive:
+                self._thread = Thread(
+                    target=self._target, name="raven-sentry.BackgroundWorker"
+                )
+                self._thread.setDaemon(True)
+                self._thread.start()
+                self._thread_for_pid = os.getpid()
+
+    def kill(self):
+        # type: () -> None
+        logger.debug("background worker got kill request")
+        with self._lock:
+            if self._thread:
+                self._queue.put_nowait(_TERMINATOR)
+                self._thread = None
+                self._thread_for_pid = None
+
+    def flush(self, timeout, callback=None):
+        # type: (float, Optional[Any]) -> None
+        logger.debug("background worker got flush request")
+        with self._lock:
+            if self.is_alive and timeout > 0.0:
+                self._wait_flush(timeout, callback)
+        logger.debug("background worker flushed")
+
+    def _wait_flush(self, timeout, callback):
+        # type: (float, Optional[Any]) -> None
+        initial_timeout = min(0.1, timeout)
+        if not self._timed_queue_join(initial_timeout):
+            pending = self._queue.qsize()
+            logger.debug("%d event(s) pending on flush", pending)
+            if callback is not None:
+                callback(pending, timeout)
+            self._timed_queue_join(timeout - initial_timeout)
+
+    def submit(self, callback):
+        # type: (Callable[[], None]) -> None
+        self._ensure_thread()
+        self._queue.put_nowait(callback)
+
+    def _target(self):
+        # type: () -> None
+        while True:
+            callback = self._queue.get()
+            try:
+                if callback is _TERMINATOR:
+                    break
+                try:
+                    callback()
+                except Exception:
+                    logger.error("Failed processing job", exc_info=True)
+            finally:
+                self._queue.task_done()
+            sleep(0)
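Editor's sketch of the `BackgroundWorker` lifecycle: `submit` lazily starts the daemon thread, `flush` blocks until the queue drains or the timeout elapses, `kill` enqueues the terminator and detaches the thread.

    from sentry_sdk.worker import BackgroundWorker

    worker = BackgroundWorker()
    results = []

    worker.submit(lambda: results.append("sent"))
    worker.flush(timeout=2.0)
    worker.kill()

    assert results == ["sent"]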
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..2a9acf1
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal = 1
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..450f2fb
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+"""
+Sentry-Python - Sentry SDK for Python
+=====================================
+
+**Sentry-Python is an SDK for Sentry.** Check out `GitHub
+<https://github.com/getsentry/sentry-python>`_ to find out more.
+"""
+
+from setuptools import setup, find_packages
+
+setup(
+    name="sentry-sdk",
+    version="0.12.2",
+    author="Sentry Team and Contributors",
+    author_email="hello@getsentry.com",
+    url="https://github.com/getsentry/sentry-python",
+    description="Python client for Sentry (https://getsentry.com)",
+    long_description=__doc__,
+    packages=find_packages(exclude=("tests", "tests.*")),
+    # PEP 561
+    package_data={"sentry_sdk": ["py.typed"]},
+    zip_safe=False,
+    license="BSD",
+    install_requires=["urllib3>=1.9", "certifi"],
+    extras_require={
+        "flask": ["flask>=0.8", "blinker>=1.1"],
+        "bottle": ["bottle>=0.12.13"],
+        "falcon": ["falcon>=1.4"],
+    },
+    classifiers=[
+        "Development Status :: 5 - Production/Stable",
+        "Environment :: Web Environment",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: BSD License",
+        "Operating System :: OS Independent",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 2",
+        "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.4",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Topic :: Software Development :: Libraries :: Python Modules",
+    ],
+)
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..7df9102
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,9 @@
+hypothesis==3.69.9
+pytest==3.7.3
+pytest-xdist==1.23.0
+tox==3.7.0
+Werkzeug==0.15.3
+pytest-localserver==0.4.1
+pytest-cov==2.6.0
+gevent
+eventlet
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..cac15f9
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,13 @@
+import sys
+
+import pytest
+
+# This is used in _capture_internal_warnings. We need to run this at import
+# time because that's where many deprecation warnings might get thrown.
+#
+# This lives in tests/__init__.py because apparently even tests/conftest.py
+# gets loaded too late.
+assert "sentry_sdk" not in sys.modules
+
+_warning_recorder_mgr = pytest.warns(None)
+_warning_recorder = _warning_recorder_mgr.__enter__()
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..0f10f03
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,269 @@
+import os
+import subprocess
+import json
+
+import pytest
+
+import gevent
+import eventlet
+
+import sentry_sdk
+from sentry_sdk._compat import reraise, string_types, iteritems
+from sentry_sdk.transport import Transport
+
+from tests import _warning_recorder, _warning_recorder_mgr
+
+SEMAPHORE = "./semaphore"
+
+if not os.path.isfile(SEMAPHORE):
+    SEMAPHORE = None
+
+
+try:
+    import pytest_benchmark
+except ImportError:
+
+    @pytest.fixture
+    def benchmark():
+        return lambda x: x()
+
+
+else:
+    del pytest_benchmark
+
+
+@pytest.fixture(autouse=True)
+def internal_exceptions(request, monkeypatch):
+    errors = []
+    if "tests_internal_exceptions" in request.keywords:
+        return
+
+    def _capture_internal_exception(self, exc_info):
+        errors.append(exc_info)
+
+    @request.addfinalizer
+    def _():
+        for e in errors:
+            reraise(*e)
+
+    monkeypatch.setattr(
+        sentry_sdk.Hub, "_capture_internal_exception", _capture_internal_exception
+    )
+
+    return errors
+
+
+@pytest.fixture(autouse=True, scope="session")
+def _capture_internal_warnings():
+    yield
+
+    _warning_recorder_mgr.__exit__(None, None, None)
+    recorder = _warning_recorder
+
+    for warning in recorder:
+        try:
+            if isinstance(warning.message, ResourceWarning):
+                continue
+        except NameError:
+            pass
+
+        # pytest-django
+        if "getfuncargvalue" in str(warning.message):
+            continue
+
+        # Happens when re-initializing the SDK
+        if "but it was only enabled on init()" in str(warning.message):
+            continue
+
+        # sanic's usage of aiohttp for test client
+        if "verify_ssl is deprecated, use ssl=False instead" in str(warning.message):
+            continue
+
+        if "getargspec" in str(warning.message) and warning.filename.endswith(
+            ("pyramid/config/util.py", "pyramid/config/views.py")
+        ):
+            continue
+
+        if "isAlive() is deprecated" in str(
+            warning.message
+        ) and warning.filename.endswith("celery/utils/timer2.py"):
+            continue
+
+        if "collections.abc" in str(warning.message) and warning.filename.endswith(
+            ("celery/canvas.py", "werkzeug/datastructures.py", "tornado/httputil.py")
+        ):
+            continue
+
+        # Django 1.7 emits a (seemingly) false-positive warning for our test
+        # app and suggests to use a middleware that does not exist in later
+        # Django versions.
+        if "SessionAuthenticationMiddleware" in str(warning.message):
+            continue
+
+        if "Something has already installed a non-asyncio" in str(warning.message):
+            continue
+
+        if "dns.hash" in str(warning.message) or "dns/namedict" in warning.filename:
+            continue
+
+        raise AssertionError(warning)
+
+
+@pytest.fixture
+def monkeypatch_test_transport(monkeypatch, semaphore_normalize):
+    def check_event(event):
+        def check_string_keys(map):
+            for key, value in iteritems(map):
+                assert isinstance(key, string_types)
+                if isinstance(value, dict):
+                    check_string_keys(value)
+
+        check_string_keys(event)
+        semaphore_normalize(event)
+
+    def inner(client):
+        monkeypatch.setattr(client, "transport", TestTransport(check_event))
+
+    return inner
+
+
+def _no_errors_in_semaphore_response(obj):
+    """Assert that semaphore didn't throw any errors when processing the
+    event."""
+
+    def inner(obj):
+        if not isinstance(obj, dict):
+            return
+
+        assert "err" not in obj
+
+        for value in obj.values():
+            inner(value)
+
+    try:
+        inner(obj.get("_meta"))
+        inner(obj.get(""))
+    except AssertionError:
+        raise AssertionError(obj)
+
+
+@pytest.fixture
+def semaphore_normalize(tmpdir):
+    def inner(event):
+        if not SEMAPHORE:
+            return
+
+        # Disable subprocess integration
+        with sentry_sdk.Hub(None):
+            # not dealing with the subprocess API right now
+            file = tmpdir.join("event")
+            file.write(json.dumps(dict(event)))
+            output = json.loads(
+                subprocess.check_output(
+                    [SEMAPHORE, "process-event"], stdin=file.open()
+                ).decode("utf-8")
+            )
+            _no_errors_in_semaphore_response(output)
+            output.pop("_meta", None)
+            return output
+
+    return inner
+
+
+@pytest.fixture
+def sentry_init(monkeypatch_test_transport):
+    def inner(*a, **kw):
+        hub = sentry_sdk.Hub.current
+        client = sentry_sdk.Client(*a, **kw)
+        hub.bind_client(client)
+        monkeypatch_test_transport(sentry_sdk.Hub.current.client)
+
+    return inner
+
+
+class TestTransport(Transport):
+    def __init__(self, capture_event_callback):
+        Transport.__init__(self)
+        self.capture_event = capture_event_callback
+        self._queue = None
+
+
+@pytest.fixture
+def capture_events(monkeypatch):
+    def inner():
+        events = []
+        test_client = sentry_sdk.Hub.current.client
+        old_capture_event = test_client.transport.capture_event
+
+        def append(event):
+            events.append(event)
+            return old_capture_event(event)
+
+        monkeypatch.setattr(test_client.transport, "capture_event", append)
+        return events
+
+    return inner
+
+
+@pytest.fixture
+def capture_events_forksafe(monkeypatch):
+    def inner():
+        events_r, events_w = os.pipe()
+        events_r = os.fdopen(events_r, "rb", 0)
+        events_w = os.fdopen(events_w, "wb", 0)
+
+        test_client = sentry_sdk.Hub.current.client
+
+        old_capture_event = test_client.transport.capture_event
+
+        def append(event):
+            events_w.write(json.dumps(event).encode("utf-8"))
+            events_w.write(b"\n")
+            return old_capture_event(event)
+
+        def flush(timeout=None, callback=None):
+            events_w.write(b"flush\n")
+
+        monkeypatch.setattr(test_client.transport, "capture_event", append)
+        monkeypatch.setattr(test_client, "flush", flush)
+
+        return EventStreamReader(events_r)
+
+    return inner
+
+
+class EventStreamReader(object):
+    def __init__(self, file):
+        self.file = file
+
+    def read_event(self):
+        return json.loads(self.file.readline().decode("utf-8"))
+
+    def read_flush(self):
+        assert self.file.readline() == b"flush\n"
+
+
+# scope=session ensures that fixture is run earlier
+@pytest.fixture(scope="session", params=[None, "eventlet", "gevent"])
+def maybe_monkeypatched_threading(request):
+    if request.param == "eventlet":
+        try:
+            eventlet.monkey_patch()
+        except AttributeError as e:
+            if "'thread.RLock' object has no attribute" in str(e):
+                # https://bitbucket.org/pypy/pypy/issues/2962/gevent-cannot-patch-rlock-under-pypy-27-7
+                pytest.skip("https://github.com/eventlet/eventlet/issues/546")
+            else:
+                raise
+    elif request.param == "gevent":
+        try:
+            gevent.monkey.patch_all()
+        except Exception as e:
+            if "_RLock__owner" in str(e):
+                pytest.skip("https://github.com/gevent/gevent/issues/1380")
+            else:
+                raise
+    else:
+        assert request.param is None
+
+    return request.param
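Editor's note: for orientation, this is roughly how the fixtures above are combined by the test modules that follow (the test body itself is illustrative):

    from sentry_sdk import capture_message

    def test_capture(sentry_init, capture_events):
        sentry_init()
        events = capture_events()

        capture_message("hello")

        event, = events
        assert event["message"] == "hello"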
diff --git a/tests/integrations/__init__.py b/tests/integrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/aiohttp/__init__.py b/tests/integrations/aiohttp/__init__.py
new file mode 100644
index 0000000..b4711aa
--- /dev/null
+++ b/tests/integrations/aiohttp/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+aiohttp = pytest.importorskip("aiohttp")
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py
new file mode 100644
index 0000000..674802c
--- /dev/null
+++ b/tests/integrations/aiohttp/test_aiohttp.py
@@ -0,0 +1,103 @@
+from aiohttp import web
+
+from sentry_sdk.integrations.aiohttp import AioHttpIntegration
+
+
+async def test_basic(sentry_init, aiohttp_client, loop, capture_events):
+    sentry_init(integrations=[AioHttpIntegration()])
+
+    async def hello(request):
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 500
+
+    event, = events
+
+    assert (
+        event["transaction"]
+        == "tests.integrations.aiohttp.test_aiohttp.test_basic..hello"
+    )
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+    request = event["request"]
+    host = request["headers"]["Host"]
+
+    assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
+    assert request["method"] == "GET"
+    assert request["query_string"] == ""
+    assert request["url"] == "http://{host}/".format(host=host)
+    assert request["headers"] == {
+        "Accept": "*/*",
+        "Accept-Encoding": "gzip, deflate",
+        "Host": host,
+        "User-Agent": request["headers"]["User-Agent"],
+    }
+
+
+async def test_403_not_captured(sentry_init, aiohttp_client, loop, capture_events):
+    sentry_init(integrations=[AioHttpIntegration()])
+
+    async def hello(request):
+        raise web.HTTPForbidden()
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 403
+
+    assert not events
+
+
+async def test_half_initialized(sentry_init, aiohttp_client, loop, capture_events):
+    sentry_init(integrations=[AioHttpIntegration()])
+    sentry_init()
+
+    async def hello(request):
+        return web.Response(text="hello")
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 200
+
+    assert events == []
+
+
+async def test_tracing(sentry_init, aiohttp_client, loop, capture_events):
+    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
+
+    async def hello(request):
+        return web.Response(text="hello")
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 200
+
+    event, = events
+
+    assert event["type"] == "transaction"
+    assert (
+        event["transaction"]
+        == "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello"
+    )
diff --git a/tests/integrations/argv/test_argv.py b/tests/integrations/argv/test_argv.py
new file mode 100644
index 0000000..b0eae83
--- /dev/null
+++ b/tests/integrations/argv/test_argv.py
@@ -0,0 +1,16 @@
+import sys
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.argv import ArgvIntegration
+
+
+def test_basic(sentry_init, capture_events, monkeypatch):
+    sentry_init(integrations=[ArgvIntegration()])
+
+    argv = ["foo", "bar", "baz"]
+    monkeypatch.setattr(sys, "argv", argv)
+
+    events = capture_events()
+    capture_message("hi")
+    event, = events
+    assert event["extra"]["sys.argv"] == argv
diff --git a/tests/integrations/asgi/__init__.py b/tests/integrations/asgi/__init__.py
new file mode 100644
index 0000000..c89ddf9
--- /dev/null
+++ b/tests/integrations/asgi/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlette")
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
new file mode 100644
index 0000000..3a47eac
--- /dev/null
+++ b/tests/integrations/asgi/test_asgi.py
@@ -0,0 +1,121 @@
+import sys
+
+import pytest
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from starlette.applications import Starlette
+from starlette.responses import PlainTextResponse
+from starlette.testclient import TestClient
+
+
+@pytest.fixture
+def app():
+    app = Starlette()
+
+    @app.route("/sync-message")
+    def hi(request):
+        capture_message("hi", level="error")
+        return PlainTextResponse("ok")
+
+    @app.route("/async-message")
+    async def hi2(request):
+        capture_message("hi", level="error")
+        return PlainTextResponse("ok")
+
+    app.add_middleware(SentryAsgiMiddleware)
+
+    return app
+
+
+@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
+def test_sync_request_data(sentry_init, app, capture_events):
+    sentry_init(send_default_pii=True)
+    events = capture_events()
+
+    client = TestClient(app)
+    response = client.get("/sync-message?foo=bar", headers={"Foo": u"ä"})
+
+    assert response.status_code == 200
+
+    event, = events
+    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi"
+    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
+    assert set(event["request"]["headers"]) == {
+        "accept",
+        "accept-encoding",
+        "connection",
+        "host",
+        "user-agent",
+        "foo",
+    }
+    assert event["request"]["query_string"] == "foo=bar"
+    assert event["request"]["url"].endswith("/sync-message")
+    assert event["request"]["method"] == "GET"
+
+    # Assert that state is not leaked
+    events.clear()
+    capture_message("foo")
+    event, = events
+
+    assert "request" not in event
+    assert "transaction" not in event
+
+
+def test_async_request_data(sentry_init, app, capture_events):
+    sentry_init(send_default_pii=True)
+    events = capture_events()
+
+    client = TestClient(app)
+    response = client.get("/async-message?foo=bar")
+
+    assert response.status_code == 200
+
+    event, = events
+    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi2"
+    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
+    assert set(event["request"]["headers"]) == {
+        "accept",
+        "accept-encoding",
+        "connection",
+        "host",
+        "user-agent",
+    }
+    assert event["request"]["query_string"] == "foo=bar"
+    assert event["request"]["url"].endswith("/async-message")
+    assert event["request"]["method"] == "GET"
+
+    # Assert that state is not leaked
+    events.clear()
+    capture_message("foo")
+    event, = events
+
+    assert "request" not in event
+    assert "transaction" not in event
+
+
+def test_errors(sentry_init, app, capture_events):
+    sentry_init(send_default_pii=True)
+    events = capture_events()
+
+    @app.route("/error")
+    def myerror(request):
+        raise ValueError("oh no")
+
+    client = TestClient(app, raise_server_exceptions=False)
+    response = client.get("/error")
+
+    assert response.status_code == 500
+
+    event, = events
+    assert (
+        event["transaction"]
+        == "tests.integrations.asgi.test_asgi.test_errors..myerror"
+    )
+    exception, = event["exception"]["values"]
+
+    assert exception["type"] == "ValueError"
+    assert exception["value"] == "oh no"
+    assert any(
+        frame["filename"].endswith("tests/integrations/asgi/test_asgi.py")
+        for frame in exception["stacktrace"]["frames"]
+    )
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
new file mode 100644
index 0000000..1f443ab
--- /dev/null
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -0,0 +1,227 @@
+import base64
+import json
+import os
+import shutil
+import subprocess
+import sys
+import uuid
+from textwrap import dedent
+
+import pytest
+
+boto3 = pytest.importorskip("boto3")
+
+LAMBDA_PRELUDE = """
+from __future__ import print_function
+
+from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
+import sentry_sdk
+import json
+from sentry_sdk.transport import Transport
+
+class TestTransport(Transport):
+    def __init__(self):
+        Transport.__init__(self)
+        self._queue = []
+
+    def capture_event(self, event):
+        self._queue.append(event)
+
+    def flush(self, timeout, callback=None):
+        # Delay event output like this to test proper shutdown
+        # Note that AWS Lambda truncates the log output to 4kb, so you'd better
+        # pray that your events are smaller than that or else tests start
+        # failing.
+        for event in self._queue:
+            print("EVENT:", json.dumps(event))
+        del self._queue[:]
+
+def init_sdk(**extra_init_args):
+    sentry_sdk.init(
+        transport=TestTransport(),
+        integrations=[AwsLambdaIntegration()],
+        **extra_init_args
+    )
+"""
+
+
+@pytest.fixture
+def lambda_client():
+    if "SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID" not in os.environ:
+        pytest.skip("AWS environ vars not set")
+
+    return boto3.client(
+        "lambda",
+        aws_access_key_id=os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
+        aws_secret_access_key=os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
+        region_name="us-east-1",
+    )
+
+
+@pytest.fixture(params=["python3.6", "python3.7", "python2.7"])
+def run_lambda_function(tmpdir, lambda_client, request, semaphore_normalize):
+    def inner(code, payload):
+        runtime = request.param
+        tmpdir.ensure_dir("lambda_tmp").remove()
+        tmp = tmpdir.ensure_dir("lambda_tmp")
+
+        tmp.join("test_lambda.py").write(code)
+
+        # Check file for valid syntax first, and that the integration does not
+        # crash when not running in Lambda (but rather a local deployment tool
+        # such as chalice's)
+        subprocess.check_call([sys.executable, str(tmp.join("test_lambda.py"))])
+
+        tmp.join("setup.cfg").write("[install]\nprefix=")
+        subprocess.check_call([sys.executable, "setup.py", "sdist", "-d", str(tmpdir)])
+
+        # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html
+        subprocess.check_call("pip install ../*.tar.gz -t .", cwd=str(tmp), shell=True)
+        shutil.make_archive(tmpdir.join("ball"), "zip", str(tmp))
+
+        fn_name = "test_function_{}".format(uuid.uuid4())
+
+        lambda_client.create_function(
+            FunctionName=fn_name,
+            Runtime=runtime,
+            Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
+            Handler="test_lambda.test_handler",
+            Code={"ZipFile": tmpdir.join("ball.zip").read(mode="rb")},
+            Description="Created as part of testsuite for getsentry/sentry-python",
+        )
+
+        @request.addfinalizer
+        def delete_function():
+            lambda_client.delete_function(FunctionName=fn_name)
+
+        response = lambda_client.invoke(
+            FunctionName=fn_name,
+            InvocationType="RequestResponse",
+            LogType="Tail",
+            Payload=payload,
+        )
+
+        assert 200 <= response["StatusCode"] < 300, response
+
+        events = []
+
+        for line in base64.b64decode(response["LogResult"]).splitlines():
+            print("AWS:", line)
+            if not line.startswith(b"EVENT: "):
+                continue
+            line = line[len(b"EVENT: ") :]
+            events.append(json.loads(line.decode("utf-8")))
+            semaphore_normalize(events[-1])
+
+        return events, response
+
+    return inner
+
+
+def test_basic(run_lambda_function):
+    events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk()
+        def test_handler(event, context):
+            raise Exception("something went wrong")
+        """
+        ),
+        b'{"foo": "bar"}',
+    )
+
+    assert response["FunctionError"] == "Unhandled"
+
+    event, = events
+    assert event["level"] == "error"
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "something went wrong"
+
+    frame1, = exception["stacktrace"]["frames"]
+    assert frame1["filename"] == "test_lambda.py"
+    assert frame1["abs_path"] == "/var/task/test_lambda.py"
+    assert frame1["function"] == "test_handler"
+
+    assert frame1["in_app"] is True
+
+    assert exception["mechanism"] == {"type": "aws_lambda", "handled": False}
+
+    assert event["extra"]["lambda"]["function_name"].startswith("test_function_")
+
+
+def test_initialization_order(run_lambda_function):
+    """Zappa lazily imports our code, so by the time we monkeypatch the handler
+    as seen by AWS already runs. At this point at least draining the queue
+    should work."""
+
+    events, _response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+            def test_handler(event, context):
+                init_sdk()
+                sentry_sdk.capture_exception(Exception("something went wrong"))
+        """
+        ),
+        b'{"foo": "bar"}',
+    )
+
+    event, = events
+    assert event["level"] == "error"
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "something went wrong"
+
+
+def test_request_data(run_lambda_function):
+    events, _response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk()
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            return "ok"
+        """
+        ),
+        payload=b"""
+        {
+          "resource": "/asd",
+          "path": "/asd",
+          "httpMethod": "GET",
+          "headers": {
+            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0",
+            "X-Forwarded-Proto": "https"
+          },
+          "queryStringParameters": {
+            "bonkers": "true"
+          },
+          "pathParameters": null,
+          "stageVariables": null,
+          "requestContext": {
+            "identity": {
+              "sourceIp": "213.47.147.207",
+              "userArn": "42"
+            }
+          },
+          "body": null,
+          "isBase64Encoded": false
+        }
+        """,
+    )
+
+    event, = events
+
+    assert event["request"] == {
+        "headers": {
+            "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0",
+            "X-Forwarded-Proto": "https",
+        },
+        "method": "GET",
+        "query_string": {"bonkers": "true"},
+        "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd",
+    }
diff --git a/tests/integrations/beam/test_beam.py b/tests/integrations/beam/test_beam.py
new file mode 100644
index 0000000..4fae5b2
--- /dev/null
+++ b/tests/integrations/beam/test_beam.py
@@ -0,0 +1,203 @@
+import pytest
+import inspect
+
+pytest.importorskip("apache_beam")
+
+import dill
+
+from sentry_sdk.integrations.beam import (
+    BeamIntegration,
+    _wrap_task_call,
+    _wrap_inspect_call,
+)
+
+from apache_beam.typehints.trivial_inference import instance_to_type
+from apache_beam.typehints.decorators import getcallargs_forhints
+from apache_beam.transforms.core import DoFn, ParDo, _DoFnParam, CallableWrapperDoFn
+from apache_beam.runners.common import DoFnInvoker, OutputProcessor, DoFnContext
+from apache_beam.utils.windowed_value import WindowedValue
+
+
+def foo():
+    return True
+
+
+def bar(x, y):
+    # print(x + y)
+    return True
+
+
+def baz(x, y=2):
+    # print(x + y)
+    return True
+
+
+class A:
+    def __init__(self, fn):
+        self.r = "We are in A"
+        self.fn = fn
+        setattr(self, "_inspect_fn", _wrap_inspect_call(self, "fn"))
+
+    def process(self):
+        return self.fn()
+
+
+class B(A, object):
+    def fa(self, x, element=False, another_element=False):
+        if x or (element and not another_element):
+            # print(self.r)
+            return True
+        1 / 0
+        return False
+
+    def __init__(self):
+        self.r = "We are in B"
+        super(B, self).__init__(self.fa)
+
+
+class SimpleFunc(DoFn):
+    def process(self, x):
+        if x:
+            1 / 0
+        return [True]
+
+
+class PlaceHolderFunc(DoFn):
+    def process(self, x, timestamp=DoFn.TimestampParam, wx=DoFn.WindowParam):
+        if isinstance(timestamp, _DoFnParam) or isinstance(wx, _DoFnParam):
+            raise Exception("Bad instance")
+        if x:
+            1 / 0
+        yield True
+
+
+def fail(x):
+    if x:
+        1 / 0
+    return [True]
+
+
+test_parent = A(foo)
+test_child = B()
+test_simple = SimpleFunc()
+test_place_holder = PlaceHolderFunc()
+test_callable = CallableWrapperDoFn(fail)
+
+
+# Cannot call simple functions or placeholder test.
+@pytest.mark.parametrize(
+    "obj,f,args,kwargs",
+    [
+        [test_parent, "fn", (), {}],
+        [test_child, "fn", (False,), {"element": True}],
+        [test_child, "fn", (True,), {}],
+        [test_simple, "process", (False,), {}],
+        [test_callable, "process", (False,), {}],
+    ],
+)
+def test_monkey_patch_call(obj, f, args, kwargs):
+    func = getattr(obj, f)
+
+    assert func(*args, **kwargs)
+    assert _wrap_task_call(func)(*args, **kwargs)
+
+
+@pytest.mark.parametrize("f", [foo, bar, baz, test_parent.fn, test_child.fn])
+def test_monkey_patch_pickle(f):
+    f_temp = _wrap_task_call(f)
+    assert dill.pickles(f_temp), "{} is not pickling correctly!".format(f)
+
+    # Pickle everything
+    s1 = dill.dumps(f_temp)
+    s2 = dill.loads(s1)
+    dill.dumps(s2)
+
+
+@pytest.mark.parametrize(
+    "f,args,kwargs",
+    [
+        [foo, (), {}],
+        [bar, (1, 5), {}],
+        [baz, (1,), {}],
+        [test_parent.fn, (), {}],
+        [test_child.fn, (False,), {"element": True}],
+        [test_child.fn, (True,), {}],
+    ],
+)
+def test_monkey_patch_signature(f, args, kwargs):
+    arg_types = [instance_to_type(v) for v in args]
+    kwargs_types = {k: instance_to_type(v) for (k, v) in kwargs.items()}
+    f_temp = _wrap_task_call(f)
+    try:
+        getcallargs_forhints(f, *arg_types, **kwargs_types)
+    except Exception:
+        print("Failed on {} with parameters {}, {}".format(f, args, kwargs))
+        raise
+    try:
+        getcallargs_forhints(f_temp, *arg_types, **kwargs_types)
+    except Exception:
+        print("Failed on {} with parameters {}, {}".format(f_temp, args, kwargs))
+        raise
+    try:
+        expected_signature = inspect.signature(f)
+        test_signature = inspect.signature(f_temp)
+        assert (
+            expected_signature == test_signature
+        ), "Failed on {}, signature {} does not match {}".format(
+            f, expected_signature, test_signature
+        )
+    except Exception:
+        # expected to pass for py2.7
+        pass
+
+
+class _OutputProcessor(OutputProcessor):
+    def process_outputs(self, windowed_input_element, results):
+        print(windowed_input_element)
+        try:
+            for result in results:
+                assert result
+        except StopIteration:
+            print("In here")
+
+
+@pytest.fixture
+def init_beam(sentry_init):
+    def inner(fn):
+        sentry_init(default_integrations=False, integrations=[BeamIntegration()])
+        # Little hack to avoid having to run the whole pipeline.
+        pardo = ParDo(fn)
+        signature = pardo._signature
+        output_processor = _OutputProcessor()
+        return DoFnInvoker.create_invoker(
+            signature, output_processor, DoFnContext("test")
+        )
+
+    return inner
+
+
+@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
+def test_invoker_normal(init_beam, fn):
+    invoker = init_beam(fn)
+    print("Normal testing {} with {} invoker.".format(fn, invoker))
+    windowed_value = WindowedValue(False, 0, [None])
+    invoker.invoke_process(windowed_value)
+
+
+@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
+def test_invoker_exception(init_beam, capture_events, capture_exceptions, fn):
+    invoker = init_beam(fn)
+    events = capture_events()
+
+    print("Exception testing {} with {} invoker.".format(fn, invoker))
+    # Window value will always have one value for the process to run.
+    windowed_value = WindowedValue(True, 0, [None])
+    try:
+        invoker.invoke_process(windowed_value)
+    except Exception:
+        pass
+
+    event, = events
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+    assert exception["mechanism"]["type"] == "beam"
diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py
new file mode 100644
index 0000000..b20675b
--- /dev/null
+++ b/tests/integrations/bottle/test_bottle.py
@@ -0,0 +1,444 @@
+import json
+import pytest
+import logging
+
+
+pytest.importorskip("bottle")
+
+from io import BytesIO
+from bottle import Bottle, debug as set_debug, abort, redirect
+from sentry_sdk import capture_message
+
+from sentry_sdk.integrations.logging import LoggingIntegration
+from werkzeug.test import Client
+
+import sentry_sdk.integrations.bottle as bottle_sentry
+
+
+@pytest.fixture(scope="function")
+def app(sentry_init):
+    app = Bottle()
+
+    @app.route("/message")
+    def hi():
+        capture_message("hi")
+        return "ok"
+
+    @app.route("/message-named-route", name="hi")
+    def named_hi():
+        capture_message("hi")
+        return "ok"
+
+    yield app
+
+
+@pytest.fixture
+def get_client(app):
+    def inner():
+        return Client(app)
+
+    return inner
+
+
+def test_has_context(sentry_init, app, capture_events, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+    events = capture_events()
+
+    client = get_client()
+    response = client.get("/message")
+    assert response[1] == "200 OK"
+
+    event, = events
+    assert event["message"] == "hi"
+    assert "data" not in event["request"]
+    assert event["request"]["url"] == "http://localhost/message"
+
+
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction",
+    [
+        ("/message", "endpoint", "hi"),
+        ("/message", "url", "/message"),
+        ("/message-named-route", "endpoint", "hi"),
+    ],
+)
+def test_transaction_style(
+    sentry_init,
+    app,
+    capture_events,
+    transaction_style,
+    expected_transaction,
+    url,
+    get_client,
+):
+    sentry_init(
+        integrations=[
+            bottle_sentry.BottleIntegration(transaction_style=transaction_style)
+        ]
+    )
+    events = capture_events()
+
+    client = get_client()
+    response = client.get("/message")
+    assert response[1] == "200 OK"
+
+    event, = events
+    assert event["transaction"].endswith(expected_transaction)
+
+
+@pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
+@pytest.mark.parametrize("catchall", (True, False), ids=["catchall", "nocatchall"])
+def test_errors(
+    sentry_init, capture_exceptions, capture_events, app, debug, catchall, get_client
+):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    app.catchall = catchall
+    set_debug(mode=debug)
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    @app.route("/")
+    def index():
+        1 / 0
+
+    client = get_client()
+    try:
+        client.get("/")
+    except ZeroDivisionError:
+        pass
+
+    exc, = exceptions
+    assert isinstance(exc, ZeroDivisionError)
+
+    event, = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
+    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
+
+
+def test_large_json_request(sentry_init, capture_events, app, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    data = {"foo": {"bar": "a" * 2000}}
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        assert bottle.request.json == data
+        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.get("/")
+
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response[1] == "200 OK"
+
+    event, = events
+    # __import__("pdb").set_trace()
+    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+
+
+@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
+def test_empty_json_request(sentry_init, capture_events, app, data, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        assert bottle.request.json == data
+        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
+        # assert not bottle.request.forms
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response[1] == "200 OK"
+
+    event, = events
+    assert event["request"]["data"] == data
+
+
+def test_medium_formdata_request(sentry_init, capture_events, app, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    data = {"foo": "a" * 2000}
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        assert bottle.request.forms["foo"] == data["foo"]
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.post("/", data=data)
+    assert response[1] == "200 OK"
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]) == 512
+
+
+@pytest.mark.parametrize("input_char", [u"a", b"a"])
+def test_too_large_raw_request(
+    sentry_init, input_char, capture_events, app, get_client
+):
+    sentry_init(
+        integrations=[bottle_sentry.BottleIntegration()], request_bodies="small"
+    )
+
+    data = input_char * 2000
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        if isinstance(data, bytes):
+            assert bottle.request.body.read() == data
+        else:
+            assert bottle.request.body.read() == data.encode("ascii")
+        assert not bottle.request.json
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.post("/", data=data)
+    assert response[1] == "200 OK"
+
+    event, = events
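+    # With request_bodies="small" the 2000-byte raw body exceeds the limit, so the
+    # SDK drops it entirely and only leaves an annotation in _meta ("!config" marks
+    # data removed because of the configured limit).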
+    assert event["_meta"]["request"]["data"] == {
+        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
+    }
+    assert not event["request"]["data"]
+
+
+def test_files_and_form(sentry_init, capture_events, app, get_client):
+    sentry_init(
+        integrations=[bottle_sentry.BottleIntegration()], request_bodies="always"
+    )
+
+    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        assert list(bottle.request.forms) == ["foo"]
+        assert list(bottle.request.files) == ["file"]
+        assert not bottle.request.json
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.post("/", data=data)
+    assert response[1] == "200 OK"
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]) == 512
+
+    assert event["_meta"]["request"]["data"]["file"] == {
+        "": {
+            "len": -1,
+            "rem": [["!raw", "x", 0, -1]],
+        }  # bottle default content-length is -1
+    }
+    assert not event["request"]["data"]["file"]
+
+
+@pytest.mark.parametrize(
+    "integrations",
+    [
+        [bottle_sentry.BottleIntegration()],
+        [bottle_sentry.BottleIntegration(), LoggingIntegration(event_level="ERROR")],
+    ],
+)
+def test_errors_not_reported_twice(
+    sentry_init, integrations, capture_events, app, get_client
+):
+    sentry_init(integrations=integrations)
+
+    app.catchall = False
+
+    logger = logging.getLogger("bottle.app")
+
+    @app.route("/")
+    def index():
+        try:
+            1 / 0
+        except Exception as e:
+            logger.exception(e)
+            raise e
+
+    events = capture_events()
+
+    client = get_client()
+    with pytest.raises(ZeroDivisionError):
+        client.get("/")
+
+    assert len(events) == 1
+
+
+def test_logging(sentry_init, capture_events, app, get_client):
+    # ensure that Bottle's logger magic doesn't break ours
+    sentry_init(
+        integrations=[
+            bottle_sentry.BottleIntegration(),
+            LoggingIntegration(event_level="ERROR"),
+        ]
+    )
+
+    @app.route("/")
+    def index():
+        app.logger.error("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+    client.get("/")
+
+    event, = events
+    assert event["level"] == "error"
+
+
+def test_mount(app, capture_exceptions, capture_events, sentry_init, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    app.catchall = False
+
+    def crashing_app(environ, start_response):
+        1 / 0
+
+    app.mount("/wsgi/", crashing_app)
+
+    client = Client(app)
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    with pytest.raises(ZeroDivisionError) as exc:
+        client.get("/wsgi/")
+
+    error, = exceptions
+
+    assert error is exc.value
+
+    event, = events
+    assert event["exception"]["values"][0]["mechanism"] == {
+        "type": "bottle",
+        "handled": False,
+    }
+
+
+def test_500(sentry_init, capture_events, app, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    set_debug(False)
+    app.catchall = True
+
+    @app.route("/")
+    def index():
+        1 / 0
+
+    @app.error(500)
+    def error_handler(err):
+        capture_message("error_msg")
+        return "My error"
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.get("/")
+    assert response[1] == "500 Internal Server Error"
+
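+    # Two events are emitted: the ZeroDivisionError report first, then the message
+    # captured inside the 500 handler.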
+    _, event = events
+    assert event["message"] == "error_msg"
+
+
+def test_error_in_errorhandler(sentry_init, capture_events, app, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+
+    set_debug(False)
+    app.catchall = True
+
+    @app.route("/")
+    def index():
+        raise ValueError()
+
+    @app.error(500)
+    def error_handler(err):
+        1 / 0
+
+    events = capture_events()
+
+    client = get_client()
+
+    with pytest.raises(ZeroDivisionError):
+        client.get("/")
+
+    event1, event2 = events
+
+    exception, = event1["exception"]["values"]
+    assert exception["type"] == "ValueError"
+
+    exception = event2["exception"]["values"][0]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+def test_bad_request_not_captured(sentry_init, capture_events, app, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    def index():
+        abort(400, "bad request in")
+
+    client = get_client()
+
+    client.get("/")
+
+    assert not events
+
+
+def test_no_exception_on_redirect(sentry_init, capture_events, app, get_client):
+    sentry_init(integrations=[bottle_sentry.BottleIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    def index():
+        redirect("/here")
+
+    @app.route("/here")
+    def here():
+        return "here"
+
+    client = get_client()
+
+    client.get("/")
+
+    assert not events
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
new file mode 100644
index 0000000..3e3c436
--- /dev/null
+++ b/tests/integrations/celery/test_celery.py
@@ -0,0 +1,311 @@
+import threading
+
+import pytest
+
+pytest.importorskip("celery")
+
+from sentry_sdk import Hub, configure_scope
+from sentry_sdk.integrations.celery import CeleryIntegration
+from sentry_sdk._compat import text_type
+
+from celery import Celery, VERSION
+from celery.bin import worker
+
+
+@pytest.fixture
+def connect_signal(request):
+    def inner(signal, f):
+        signal.connect(f)
+        request.addfinalizer(lambda: signal.disconnect(f))
+
+    return inner
+
+
+@pytest.fixture
+def init_celery(sentry_init):
+    def inner(propagate_traces=True, **kwargs):
+        sentry_init(
+            integrations=[CeleryIntegration(propagate_traces=propagate_traces)],
+            **kwargs
+        )
+        celery = Celery(__name__)
+        if VERSION < (4,):
+            celery.conf.CELERY_ALWAYS_EAGER = True
+        else:
+            celery.conf.task_always_eager = True
+        return celery
+
+    return inner
+
+
+@pytest.fixture
+def celery(init_celery):
+    return init_celery()
+
+
+@pytest.fixture(
+    params=[
+        lambda task, x, y: (task.delay(x, y), {"args": [x, y], "kwargs": {}}),
+        lambda task, x, y: (task.apply_async((x, y)), {"args": [x, y], "kwargs": {}}),
+        lambda task, x, y: (
+            task.apply_async(args=(x, y)),
+            {"args": [x, y], "kwargs": {}},
+        ),
+        lambda task, x, y: (
+            task.apply_async(kwargs=dict(x=x, y=y)),
+            {"args": [], "kwargs": {"x": x, "y": y}},
+        ),
+    ]
+)
+def celery_invocation(request):
+    """
+    Invokes a task in the various ways Celery allows, to exercise our apply_async monkeypatch.
+
+    Currently limited to a task signature of the form foo(x, y)
+    """
+    return request.param
+
+
+def test_simple(capture_events, celery, celery_invocation):
+    events = capture_events()
+
+    @celery.task(name="dummy_task")
+    def dummy_task(x, y):
+        foo = 42  # noqa
+        return x / y
+
+    with Hub.current.start_span() as span:
+        celery_invocation(dummy_task, 1, 2)
+        _, expected_context = celery_invocation(dummy_task, 1, 0)
+
+    event, = events
+
+    assert event["contexts"]["trace"]["trace_id"] == span.trace_id
+    assert event["contexts"]["trace"]["span_id"] != span.span_id
+    assert event["transaction"] == "dummy_task"
+    assert event["extra"]["celery-job"] == dict(
+        task_name="dummy_task", **expected_context
+    )
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+    assert exception["mechanism"]["type"] == "celery"
+    assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
+
+
+@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
+def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
+    celery = init_celery(traces_sample_rate=1.0)
+
+    @celery.task(name="dummy_task")
+    def dummy_task(x, y):
+        return x / y
+
+    # XXX: For some reason the first call does not get instrumented properly.
+    celery_invocation(dummy_task, 1, 1)
+
+    events = capture_events()
+
+    with Hub.current.start_span(transaction="submission") as span:
+        celery_invocation(dummy_task, 1, 0 if task_fails else 1)
+
+    if task_fails:
+        error_event = events.pop(0)
+        assert error_event["contexts"]["trace"]["trace_id"] == span.trace_id
+        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+
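+    # The eager task finishes inside the outer span, so its transaction event is
+    # emitted first, followed by the enclosing "submission" transaction.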
+    execution_event, submission_event = events
+
+    assert execution_event["transaction"] == "dummy_task"
+    assert submission_event["transaction"] == "submission"
+
+    assert execution_event["type"] == submission_event["type"] == "transaction"
+    assert execution_event["contexts"]["trace"]["trace_id"] == span.trace_id
+    assert submission_event["contexts"]["trace"]["trace_id"] == span.trace_id
+
+    if task_fails:
+        assert execution_event["contexts"]["trace"]["status"] == "failure"
+    else:
+        assert "status" not in execution_event["contexts"]["trace"]
+
+    assert execution_event["spans"] == []
+    assert submission_event["spans"] == [
+        {
+            u"data": {},
+            u"description": u"dummy_task",
+            u"op": "celery.submit",
+            u"parent_span_id": submission_event["contexts"]["trace"]["span_id"],
+            u"same_process_as_parent": True,
+            u"span_id": submission_event["spans"][0]["span_id"],
+            u"start_timestamp": submission_event["spans"][0]["start_timestamp"],
+            u"tags": {},
+            u"timestamp": submission_event["spans"][0]["timestamp"],
+            u"trace_id": text_type(span.trace_id),
+        }
+    ]
+
+
+def test_no_stackoverflows(celery):
+    """We used to have a bug in the Celery integration where its monkeypatching
+    was repeated for every task invocation, leading to stackoverflows.
+
+    See https://github.com/getsentry/sentry-python/issues/265
+    """
+
+    results = []
+
+    @celery.task(name="dummy_task")
+    def dummy_task():
+        with configure_scope() as scope:
+            scope.set_tag("foo", "bar")
+
+        results.append(42)
+
+    for _ in range(10000):
+        dummy_task.delay()
+
+    assert results == [42] * 10000
+
+    with configure_scope() as scope:
+        assert not scope._tags
+
+
+def test_simple_no_propagation(capture_events, init_celery):
+    celery = init_celery(propagate_traces=False)
+    events = capture_events()
+
+    @celery.task(name="dummy_task")
+    def dummy_task():
+        1 / 0
+
+    with Hub.current.start_span() as span:
+        dummy_task.delay()
+
+    event, = events
+    assert event["contexts"]["trace"]["trace_id"] != span.trace_id
+    assert event["transaction"] == "dummy_task"
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+def test_ignore_expected(capture_events, celery):
+    events = capture_events()
+
+    @celery.task(name="dummy_task", throws=(ZeroDivisionError,))
+    def dummy_task(x, y):
+        return x / y
+
+    dummy_task.delay(1, 2)
+    dummy_task.delay(1, 0)
+    assert not events
+
+
+def test_broken_prerun(init_celery, connect_signal):
+    from celery.signals import task_prerun
+
+    stack_lengths = []
+
+    def crash(*args, **kwargs):
+        # scope should exist in prerun
+        stack_lengths.append(len(Hub.current._stack))
+        1 / 0
+
+    # Order here is important to reproduce the bug: In Celery 3, a crashing
+    # prerun would prevent other preruns from running.
+
+    connect_signal(task_prerun, crash)
+    celery = init_celery()
+
+    assert len(Hub.current._stack) == 1
+
+    @celery.task(name="dummy_task")
+    def dummy_task(x, y):
+        stack_lengths.append(len(Hub.current._stack))
+        return x / y
+
+    if VERSION >= (4,):
+        dummy_task.delay(2, 2)
+    else:
+        with pytest.raises(ZeroDivisionError):
+            dummy_task.delay(2, 2)
+
+    assert len(Hub.current._stack) == 1
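+    # On Celery 3 the crashing prerun aborts the eager task, so only the signal
+    # handler samples the stack depth; on Celery 4 the task body still runs, and
+    # both observe the extra scope pushed by the integration.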
+    if VERSION < (4,):
+        assert stack_lengths == [2]
+    else:
+        assert stack_lengths == [2, 2]
+
+
+@pytest.mark.xfail(
+    (4, 2, 0) <= VERSION,
+    strict=True,
+    reason="https://github.com/celery/celery/issues/4661",
+)
+def test_retry(celery, capture_events):
+    events = capture_events()
+    failures = [True, True, False]
+    runs = []
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self):
+        runs.append(1)
+        try:
+            if failures.pop(0):
+                1 / 0
+        except Exception as exc:
+            self.retry(max_retries=2, exc=exc)
+
+    dummy_task.delay()
+
+    assert len(runs) == 3
+    assert not events
+
+    failures = [True, True, True]
+    runs = []
+
+    dummy_task.delay()
+
+    assert len(runs) == 3
+    event, = events
+    exceptions = event["exception"]["values"]
+
+    for e in exceptions:
+        assert e["type"] == "ZeroDivisionError"
+
+
+@pytest.mark.skipif(VERSION < (4,), reason="in-memory backend broken")
+def test_transport_shutdown(request, celery, capture_events_forksafe, tmpdir):
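+    # Run a real (non-eager) worker whose pooled child exits after a single task;
+    # the captured event must still be flushed by the transport before shutdown.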
+    events = capture_events_forksafe()
+
+    celery.conf.worker_max_tasks_per_child = 1
+    celery.conf.broker_url = "memory://localhost/"
+    celery.conf.broker_backend = "memory"
+    celery.conf.result_backend = "file://{}".format(tmpdir.mkdir("celery-results"))
+    celery.conf.task_always_eager = False
+
+    runs = []
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self):
+        runs.append(1)
+        1 / 0
+
+    res = dummy_task.delay()
+
+    w = worker.worker(app=celery)
+    t = threading.Thread(target=w.run)
+    t.daemon = True
+    t.start()
+
+    with pytest.raises(Exception):
+        # Celery 4.1 raises a gibberish exception
+        res.wait()
+
+    event = events.read_event()
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+    events.read_flush()
+
+    # if this is nonempty, the worker never really forked
+    assert not runs
diff --git a/tests/integrations/conftest.py b/tests/integrations/conftest.py
new file mode 100644
index 0000000..cffb278
--- /dev/null
+++ b/tests/integrations/conftest.py
@@ -0,0 +1,21 @@
+import pytest
+import sentry_sdk
+
+
+@pytest.fixture
+def capture_exceptions(monkeypatch):
+    def inner():
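+        # Record the original exception objects (taken from the "exc_info" hint)
+        # while still forwarding every event to the real Hub.capture_event.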
+        errors = set()
+        old_capture_event = sentry_sdk.Hub.capture_event
+
+        def capture_event(self, event, hint=None):
+            if hint:
+                if "exc_info" in hint:
+                    error = hint["exc_info"][1]
+                    errors.add(error)
+            return old_capture_event(self, event, hint=hint)
+
+        monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event)
+        return errors
+
+    return inner
diff --git a/tests/integrations/django/__init__.py b/tests/integrations/django/__init__.py
new file mode 100644
index 0000000..d2555a8
--- /dev/null
+++ b/tests/integrations/django/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+django = pytest.importorskip("django")
diff --git a/tests/integrations/django/channels/__init__.py b/tests/integrations/django/channels/__init__.py
new file mode 100644
index 0000000..50e90e8
--- /dev/null
+++ b/tests/integrations/django/channels/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("channels")
diff --git a/tests/integrations/django/channels/test_channels.py b/tests/integrations/django/channels/test_channels.py
new file mode 100644
index 0000000..52f0f5a
--- /dev/null
+++ b/tests/integrations/django/channels/test_channels.py
@@ -0,0 +1,39 @@
+import pytest
+
+
+from channels.testing import HttpCommunicator
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.django import DjangoIntegration
+
+from tests.integrations.django.myapp.asgi import application
+
+
+@pytest.mark.asyncio
+async def test_basic(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+
+    comm = HttpCommunicator(application, "GET", "/view-exc?test=query")
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    event, = events
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+    # Test that the ASGI middleware got set up correctly. Right now this needs
+    # to be installed manually (see myapp/asgi.py)
+    assert event["transaction"] == "/view-exc"
+    assert event["request"] == {
+        "cookies": {},
+        "headers": {},
+        "method": "GET",
+        "query_string": "test=query",
+        "url": "/view-exc",
+    }
+
+    capture_message("hi")
+    event = events[-1]
+    assert "request" not in event
diff --git a/tests/integrations/django/myapp/__init__.py b/tests/integrations/django/myapp/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/django/myapp/asgi.py b/tests/integrations/django/myapp/asgi.py
new file mode 100644
index 0000000..30dadc0
--- /dev/null
+++ b/tests/integrations/django/myapp/asgi.py
@@ -0,0 +1,15 @@
+"""
+ASGI entrypoint. Configures Django and then runs the application
+defined in the ASGI_APPLICATION setting.
+"""
+
+import os
+import django
+from channels.routing import get_default_application
+
+os.environ.setdefault(
+    "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
+)
+
+django.setup()
+application = get_default_application()
diff --git a/tests/integrations/django/myapp/manage.py b/tests/integrations/django/myapp/manage.py
new file mode 100644
index 0000000..d65c90e
--- /dev/null
+++ b/tests/integrations/django/myapp/manage.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import os
+import sys
+
+if __name__ == "__main__":
+    os.environ.setdefault(
+        "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
+    )
+
+    from django.core.management import execute_from_command_line
+
+execute_from_command_line(sys.argv)
diff --git a/tests/integrations/django/myapp/management/__init__.py b/tests/integrations/django/myapp/management/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/django/myapp/management/commands/__init__.py b/tests/integrations/django/myapp/management/commands/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/django/myapp/management/commands/mycrash.py b/tests/integrations/django/myapp/management/commands/mycrash.py
new file mode 100644
index 0000000..48764d9
--- /dev/null
+++ b/tests/integrations/django/myapp/management/commands/mycrash.py
@@ -0,0 +1,9 @@
+from django.core.management.base import BaseCommand
+
+
+class Command(BaseCommand):
+    def add_arguments(self, parser):
+        pass
+
+    def handle(self, *args, **options):
+        1 / 0
diff --git a/tests/integrations/django/myapp/routing.py b/tests/integrations/django/myapp/routing.py
new file mode 100644
index 0000000..796d3d7
--- /dev/null
+++ b/tests/integrations/django/myapp/routing.py
@@ -0,0 +1,4 @@
+from channels.http import AsgiHandler
+from channels.routing import ProtocolTypeRouter
+
+application = ProtocolTypeRouter({"http": AsgiHandler})
diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py
new file mode 100644
index 0000000..d0c47a0
--- /dev/null
+++ b/tests/integrations/django/myapp/settings.py
@@ -0,0 +1,155 @@
+"""
+Django settings for myapp project.
+
+Generated by 'django-admin startproject' using Django 2.0.7.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/2.0/topics/settings/
+
+For the full list of settings and their values, see
+https://docs.djangoproject.com/en/2.0/ref/settings/
+"""
+
+
+# We shouldn't access settings while setting up integrations. Initialize SDK
+# here to provoke any errors that might occur.
+import sentry_sdk
+from sentry_sdk.integrations.django import DjangoIntegration
+
+sentry_sdk.init(integrations=[DjangoIntegration()])
+
+
+import os
+
+try:
+    # Django >= 1.10
+    from django.utils.deprecation import MiddlewareMixin
+except ImportError:
+    # Not required for Django <= 1.9, see:
+    # https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
+    MiddlewareMixin = object
+
+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = "u95e#xr$t3!vdux)fj11!*q*^w^^r#kiyrvt3kjui-t_k%m3op"
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ["localhost"]
+
+
+# Application definition
+
+INSTALLED_APPS = [
+    "django.contrib.auth",
+    "django.contrib.contenttypes",
+    "django.contrib.sessions",
+    "django.contrib.messages",
+    "django.contrib.staticfiles",
+    "tests.integrations.django.myapp",
+]
+
+
+class TestMiddleware(MiddlewareMixin):
+    def process_request(self, request):
+        if "middleware-exc" in request.path:
+            1 / 0
+
+    def process_response(self, request, response):
+        return response
+
+
+MIDDLEWARE_CLASSES = [
+    "django.contrib.sessions.middleware.SessionMiddleware",
+    "django.contrib.auth.middleware.AuthenticationMiddleware",
+    "tests.integrations.django.myapp.settings.TestMiddleware",
+]
+
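+# Django 1.10+ reads the new-style MIDDLEWARE setting; older versions use
+# MIDDLEWARE_CLASSES. The MiddlewareMixin import above doubles as the version check.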
+if MiddlewareMixin is not object:
+    MIDDLEWARE = MIDDLEWARE_CLASSES
+
+
+ROOT_URLCONF = "tests.integrations.django.myapp.urls"
+
+TEMPLATES = [
+    {
+        "BACKEND": "django.template.backends.django.DjangoTemplates",
+        "DIRS": [],
+        "APP_DIRS": True,
+        "OPTIONS": {
+            "debug": True,
+            "context_processors": [
+                "django.template.context_processors.debug",
+                "django.template.context_processors.request",
+                "django.contrib.auth.context_processors.auth",
+                "django.contrib.messages.context_processors.messages",
+            ],
+        },
+    }
+]
+
+WSGI_APPLICATION = "tests.integrations.django.myapp.wsgi.application"
+
+
+# Database
+# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
+
+DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
+
+try:
+    import psycopg2  # noqa
+
+    DATABASES["postgres"] = {
+        "ENGINE": "django.db.backends.postgresql_psycopg2",
+        "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
+        "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
+        "HOST": "localhost",
+        "PORT": 5432,
+    }
+except (ImportError, KeyError):
+    pass
+
+
+# Password validation
+# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
+
+AUTH_PASSWORD_VALIDATORS = [
+    {
+        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
+    },
+    {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
+    {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
+    {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
+]
+
+
+# Internationalization
+# https://docs.djangoproject.com/en/2.0/topics/i18n/
+
+LANGUAGE_CODE = "en-us"
+
+TIME_ZONE = "UTC"
+
+USE_I18N = True
+
+USE_L10N = True
+
+USE_TZ = True
+
+TEMPLATE_DEBUG = True
+
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/2.0/howto/static-files/
+
+STATIC_URL = "/static/"
+
+# django-channels specific
+ASGI_APPLICATION = "tests.integrations.django.myapp.routing.application"
diff --git a/tests/integrations/django/myapp/templates/error.html b/tests/integrations/django/myapp/templates/error.html
new file mode 100644
index 0000000..9f60120
--- /dev/null
+++ b/tests/integrations/django/myapp/templates/error.html
@@ -0,0 +1,20 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
+{% invalid template tag %}
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
new file mode 100644
index 0000000..482d194
--- /dev/null
+++ b/tests/integrations/django/myapp/urls.py
@@ -0,0 +1,69 @@
+"""myapp URL Configuration
+
+The `urlpatterns` list routes URLs to views. For more information please see:
+    https://docs.djangoproject.com/en/2.0/topics/http/urls/
+Examples:
+Function views
+    1. Add an import:  from my_app import views
+    2. Add a URL to urlpatterns:  path('', views.home, name='home')
+Class-based views
+    1. Add an import:  from other_app.views import Home
+    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
+Including another URLconf
+    1. Import the include() function: from django.urls import include, path
+    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
+"""
+from __future__ import absolute_import
+
+try:
+    from django.urls import path
+except ImportError:
+    from django.conf.urls import url as path
+
+from . import views
+
+urlpatterns = [
+    path("view-exc", views.view_exc, name="view_exc"),
+    path(
+        "read-body-and-view-exc",
+        views.read_body_and_view_exc,
+        name="read_body_and_view_exc",
+    ),
+    path("middleware-exc", views.message, name="middleware_exc"),
+    path("message", views.message, name="message"),
+    path("mylogin", views.mylogin, name="mylogin"),
+    path("classbased", views.ClassBasedView.as_view(), name="classbased"),
+    path("post-echo", views.post_echo, name="post_echo"),
+    path("template-exc", views.template_exc, name="template_exc"),
+    path(
+        "permission-denied-exc",
+        views.permission_denied_exc,
+        name="permission_denied_exc",
+    ),
+]
+
+
+try:
+    urlpatterns.append(
+        path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc")
+    )
+    urlpatterns.append(
+        path(
+            "rest-framework-read-body-and-exc",
+            views.rest_framework_read_body_and_exc,
+            name="rest_framework_read_body_and_exc",
+        )
+    )
+    urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
+    urlpatterns.append(
+        path(
+            "rest-permission-denied-exc",
+            views.rest_permission_denied_exc,
+            name="rest_permission_denied_exc",
+        )
+    )
+except AttributeError:
+    pass
+
+handler500 = views.handler500
+handler404 = views.handler404
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
new file mode 100644
index 0000000..ebe667c
--- /dev/null
+++ b/tests/integrations/django/myapp/views.py
@@ -0,0 +1,84 @@
+from django.contrib.auth import login
+from django.contrib.auth.models import User
+from django.core.exceptions import PermissionDenied
+from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound
+from django.shortcuts import render
+from django.views.generic import ListView
+
+try:
+    from rest_framework.decorators import api_view
+
+    @api_view(["POST"])
+    def rest_framework_exc(request):
+        1 / 0
+
+    @api_view(["POST"])
+    def rest_framework_read_body_and_exc(request):
+        request.data
+        1 / 0
+
+    @api_view(["GET"])
+    def rest_hello(request):
+        return HttpResponse("ok")
+
+    @api_view(["GET"])
+    def rest_permission_denied_exc(request):
+        raise PermissionDenied("bye")
+
+
+except ImportError:
+    pass
+
+
+import sentry_sdk
+
+
+def view_exc(request):
+    1 / 0
+
+
+def read_body_and_view_exc(request):
+    request.read()
+    1 / 0
+
+
+def message(request):
+    sentry_sdk.capture_message("hi")
+    return HttpResponse("ok")
+
+
+def mylogin(request):
+    user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
+    user.backend = "django.contrib.auth.backends.ModelBackend"
+    login(request, user)
+    return HttpResponse("ok")
+
+
+def handler500(request):
+    return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id())
+
+
+class ClassBasedView(ListView):
+    model = None
+
+    def head(self, *args, **kwargs):
+        sentry_sdk.capture_message("hi")
+        return HttpResponse("")
+
+
+def post_echo(request):
+    sentry_sdk.capture_message("hi")
+    return HttpResponse(request.body)
+
+
+def handler404(*args, **kwargs):
+    sentry_sdk.capture_message("not found", level="error")
+    return HttpResponseNotFound("404")
+
+
+def template_exc(request, *args, **kwargs):
+    return render(request, "error.html")
+
+
+def permission_denied_exc(*args, **kwargs):
+    raise PermissionDenied("bye")
diff --git a/tests/integrations/django/myapp/wsgi.py b/tests/integrations/django/myapp/wsgi.py
new file mode 100644
index 0000000..8c01991
--- /dev/null
+++ b/tests/integrations/django/myapp/wsgi.py
@@ -0,0 +1,18 @@
+"""
+WSGI config for myapp project.
+
+It exposes the WSGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
+"""
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+os.environ.setdefault(
+    "DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
+)
+
+application = get_wsgi_application()
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
new file mode 100644
index 0000000..40160a2
--- /dev/null
+++ b/tests/integrations/django/test_basic.py
@@ -0,0 +1,562 @@
+import pytest
+import json
+
+from werkzeug.test import Client
+from django import VERSION as DJANGO_VERSION
+from django.contrib.auth.models import User
+from django.core.management import execute_from_command_line
+from django.db.utils import OperationalError, ProgrammingError, DataError
+
+
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+from sentry_sdk import capture_message, capture_exception
+from sentry_sdk.integrations.django import DjangoIntegration
+
+from tests.integrations.django.myapp.wsgi import application
+
+
+@pytest.fixture
+def client():
+    return Client(application)
+
+
+def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    exceptions = capture_exceptions()
+    events = capture_events()
+    client.get(reverse("view_exc"))
+
+    error, = exceptions
+    assert isinstance(error, ZeroDivisionError)
+
+    event, = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "django"
+
+
+def test_middleware_exceptions(sentry_init, client, capture_exceptions):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    exceptions = capture_exceptions()
+    client.get(reverse("middleware_exc"))
+
+    error, = exceptions
+    assert isinstance(error, ZeroDivisionError)
+
+
+def test_request_captured(sentry_init, client, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    content, status, headers = client.get(reverse("message"))
+    assert b"".join(content) == b"ok"
+
+    event, = events
+    assert event["transaction"] == "/message"
+    assert event["request"] == {
+        "cookies": {},
+        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
+        "headers": {"Host": "localhost"},
+        "method": "GET",
+        "query_string": "",
+        "url": "http://localhost/message",
+    }
+
+
+def test_transaction_with_class_view(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration(transaction_style="function_name")],
+        send_default_pii=True,
+    )
+    events = capture_events()
+    content, status, headers = client.head(reverse("classbased"))
+    assert status.lower() == "200 ok"
+
+    event, = events
+
+    assert (
+        event["transaction"] == "tests.integrations.django.myapp.views.ClassBasedView"
+    )
+    assert event["message"] == "hi"
+
+
+@pytest.mark.django_db
+def test_user_captured(sentry_init, client, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    content, status, headers = client.get(reverse("mylogin"))
+    assert b"".join(content) == b"ok"
+
+    assert not events
+
+    content, status, headers = client.get(reverse("message"))
+    assert b"".join(content) == b"ok"
+
+    event, = events
+
+    assert event["user"] == {
+        "email": "lennon@thebeatles.com",
+        "username": "john",
+        "id": "1",
+    }
+
+
+@pytest.mark.django_db
+def test_queryset_repr(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+    events = capture_events()
+    User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
+
+    try:
+        my_queryset = User.objects.all()  # noqa
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+    frame, = exception["stacktrace"]["frames"]
+    assert frame["vars"]["my_queryset"].startswith(
+        "= (1, 10):
+        reference_value = [
+            "tests.integrations.django.myapp.settings.TestMiddleware.__call__",
+            "django.contrib.auth.middleware.AuthenticationMiddleware.__call__",
+            "django.contrib.sessions.middleware.SessionMiddleware.__call__",
+        ]
+    else:
+        reference_value = [
+            "django.contrib.sessions.middleware.SessionMiddleware.process_request",
+            "django.contrib.auth.middleware.AuthenticationMiddleware.process_request",
+            "tests.integrations.django.myapp.settings.TestMiddleware.process_request",
+            "tests.integrations.django.myapp.settings.TestMiddleware.process_response",
+            "django.contrib.sessions.middleware.SessionMiddleware.process_response",
+        ]
+
+    assert [t["description"] for t in transaction["spans"]] == reference_value
+
+
+def test_middleware_spans_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration(middleware_spans=False)], traces_sample_rate=1.0
+    )
+    events = capture_events()
+
+    _content, status, _headers = client.get(reverse("message"))
+
+    message, transaction = events
+
+    assert message["message"] == "hi"
+
+    assert not transaction["spans"]
diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py
new file mode 100644
index 0000000..5cf3f17
--- /dev/null
+++ b/tests/integrations/django/test_transactions.py
@@ -0,0 +1,52 @@
+from __future__ import absolute_import
+
+import pytest
+import django
+
+try:
+    from django.conf.urls import url, include
+except ImportError:
+    # for Django versions older than 1.4
+    from django.conf.urls.defaults import url, include  # NOQA
+
+from sentry_sdk.integrations.django.transactions import RavenResolver
+
+
+if django.VERSION < (1, 9):
+    included_url_conf = (url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
+else:
+    included_url_conf = ((url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")
+
+example_url_conf = (
+    url(r"^api/(?P[\w_-]+)/store/$", lambda x: ""),
+    url(r"^report/", lambda x: ""),
+    url(r"^example/", include(included_url_conf)),
+)
+
+
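+# RavenResolver re-resolves a concrete request path against the raw urlconf to build
+# a parameterized transaction name such as /api/{project_id}/store/.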
+def test_legacy_resolver_no_match():
+    resolver = RavenResolver()
+    result = resolver.resolve("/foo/bar", example_url_conf)
+    assert result == "/foo/bar"
+
+
+def test_legacy_resolver_complex_match():
+    resolver = RavenResolver()
+    result = resolver.resolve("/api/1234/store/", example_url_conf)
+    assert result == "/api/{project_id}/store/"
+
+
+def test_legacy_resolver_included_match():
+    resolver = RavenResolver()
+    result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
+    assert result == "/example/foo/bar/{param}"
+
+
+@pytest.mark.skipif(django.VERSION < (2, 0), reason="Requires Django > 2.0")
+def test_legacy_resolver_newstyle_django20_urlconf():
+    from django.urls import path
+
+    url_conf = (path("api/v2/<project_id>/store/", lambda x: ""),)
+    resolver = RavenResolver()
+    result = resolver.resolve("/api/v2/1234/store/", url_conf)
+    assert result == "/api/v2/{project_id}/store/"
diff --git a/tests/integrations/excepthook/test_excepthook.py b/tests/integrations/excepthook/test_excepthook.py
new file mode 100644
index 0000000..18deccd
--- /dev/null
+++ b/tests/integrations/excepthook/test_excepthook.py
@@ -0,0 +1,76 @@
+import pytest
+import sys
+import subprocess
+
+from textwrap import dedent
+
+
+def test_excepthook(tmpdir):
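+    # Write a small script that replaces the HTTP transport's send with a print stub,
+    # then run it in a subprocess so the excepthook integration handles the uncaught
+    # ZeroDivisionError.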
+    app = tmpdir.join("app.py")
+    app.write(
+        dedent(
+            """
+    from sentry_sdk import init, transport
+
+    def send_event(self, event):
+        print("capture event was called")
+        print(event)
+
+    transport.HttpTransport._send_event = send_event
+
+    init("http://foobar@localhost/123")
+
+    frame_value = "LOL"
+
+    1/0
+    """
+        )
+    )
+
+    with pytest.raises(subprocess.CalledProcessError) as excinfo:
+        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)
+
+    output = excinfo.value.output
+    print(output)
+
+    assert b"ZeroDivisionError" in output
+    assert b"LOL" in output
+    assert b"capture event was called" in output
+
+
+def test_always_value_excepthook(tmpdir):
+    app = tmpdir.join("app.py")
+    app.write(
+        dedent(
+            """
+    import sys
+    from sentry_sdk import init, transport
+    from sentry_sdk.integrations.excepthook import ExcepthookIntegration
+
+    def send_event(self, event):
+        print("capture event was called")
+        print(event)
+
+    transport.HttpTransport._send_event = send_event
+
+    sys.ps1 = "always_value_test"
+    init("http://foobar@localhost/123",
+        integrations=[ExcepthookIntegration(always_run=True)]
+    )
+
+    frame_value = "LOL"
+
+    1/0
+    """
+        )
+    )
+
+    with pytest.raises(subprocess.CalledProcessError) as excinfo:
+        subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)
+
+    output = excinfo.value.output
+    print(output)
+
+    assert b"ZeroDivisionError" in output
+    assert b"LOL" in output
+    assert b"capture event was called" in output
diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py
new file mode 100644
index 0000000..995cb26
--- /dev/null
+++ b/tests/integrations/falcon/test_falcon.py
@@ -0,0 +1,306 @@
+from __future__ import absolute_import
+
+import logging
+
+import pytest
+
+pytest.importorskip("falcon")
+
+import falcon
+import falcon.testing
+import sentry_sdk
+from sentry_sdk.integrations.falcon import FalconIntegration
+from sentry_sdk.integrations.logging import LoggingIntegration
+
+
+@pytest.fixture
+def make_app(sentry_init):
+    def inner():
+        class MessageResource:
+            def on_get(self, req, resp):
+                sentry_sdk.capture_message("hi")
+                resp.media = "hi"
+
+        app = falcon.API()
+        app.add_route("/message", MessageResource())
+
+        return app
+
+    return inner
+
+
+@pytest.fixture
+def make_client(make_app):
+    def inner():
+        app = make_app()
+        return falcon.testing.TestClient(app)
+
+    return inner
+
+
+def test_has_context(sentry_init, capture_events, make_client):
+    sentry_init(integrations=[FalconIntegration()])
+    events = capture_events()
+
+    client = make_client()
+    response = client.simulate_get("/message")
+    assert response.status == falcon.HTTP_200
+
+    event, = events
+    assert event["transaction"] == "/message"  # Falcon URI template
+    assert "data" not in event["request"]
+    assert event["request"]["url"] == "http://falconframework.org/message"
+
+
+@pytest.mark.parametrize(
+    "transaction_style,expected_transaction",
+    [("uri_template", "/message"), ("path", "/message")],
+)
+def test_transaction_style(
+    sentry_init, make_client, capture_events, transaction_style, expected_transaction
+):
+    integration = FalconIntegration(transaction_style=transaction_style)
+    sentry_init(integrations=[integration])
+    events = capture_events()
+
+    client = make_client()
+    response = client.simulate_get("/message")
+    assert response.status == falcon.HTTP_200
+
+    event, = events
+    assert event["transaction"] == expected_transaction
+
+
+def test_errors(sentry_init, capture_exceptions, capture_events):
+    sentry_init(integrations=[FalconIntegration()], debug=True)
+
+    class ZeroDivisionErrorResource:
+        def on_get(self, req, resp):
+            1 / 0
+
+    app = falcon.API()
+    app.add_route("/", ZeroDivisionErrorResource())
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+
+    try:
+        client.simulate_get("/")
+    except ZeroDivisionError:
+        pass
+
+    exc, = exceptions
+    assert isinstance(exc, ZeroDivisionError)
+
+    event, = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
+
+
+def test_falcon_large_json_request(sentry_init, capture_events):
+    sentry_init(integrations=[FalconIntegration()])
+
+    data = {"foo": {"bar": "a" * 2000}}
+
+    class Resource:
+        def on_post(self, req, resp):
+            assert req.media == data
+            sentry_sdk.capture_message("hi")
+            resp.media = "ok"
+
+    app = falcon.API()
+    app.add_route("/", Resource())
+
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    response = client.simulate_post("/", json=data)
+    assert response.status == falcon.HTTP_200
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+
+
+@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
+def test_falcon_empty_json_request(sentry_init, capture_events, data):
+    sentry_init(integrations=[FalconIntegration()])
+
+    class Resource:
+        def on_post(self, req, resp):
+            assert req.media == data
+            sentry_sdk.capture_message("hi")
+            resp.media = "ok"
+
+    app = falcon.API()
+    app.add_route("/", Resource())
+
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    response = client.simulate_post("/", json=data)
+    assert response.status == falcon.HTTP_200
+
+    event, = events
+    assert event["request"]["data"] == data
+
+
+def test_falcon_raw_data_request(sentry_init, capture_events):
+    sentry_init(integrations=[FalconIntegration()])
+
+    class Resource:
+        def on_post(self, req, resp):
+            sentry_sdk.capture_message("hi")
+            resp.media = "ok"
+
+    app = falcon.API()
+    app.add_route("/", Resource())
+
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    response = client.simulate_post("/", body="hi")
+    assert response.status == falcon.HTTP_200
+
+    event, = events
+    assert event["request"]["headers"]["Content-Length"] == "2"
+    assert event["request"]["data"] == ""
+
+
+def test_logging(sentry_init, capture_events):
+    sentry_init(
+        integrations=[FalconIntegration(), LoggingIntegration(event_level="ERROR")]
+    )
+
+    logger = logging.getLogger()
+
+    app = falcon.API()
+
+    class Resource:
+        def on_get(self, req, resp):
+            logger.error("hi")
+            resp.media = "ok"
+
+    app.add_route("/", Resource())
+
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    client.simulate_get("/")
+
+    event, = events
+    assert event["level"] == "error"
+
+
+def test_500(sentry_init, capture_events):
+    sentry_init(integrations=[FalconIntegration()])
+
+    app = falcon.API()
+
+    class Resource:
+        def on_get(self, req, resp):
+            1 / 0
+
+    app.add_route("/", Resource())
+
+    def http500_handler(ex, req, resp, params):
+        sentry_sdk.capture_exception(ex)
+        resp.media = {"message": "Sentry error: %s" % sentry_sdk.last_event_id()}
+
+    app.add_error_handler(Exception, http500_handler)
+
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    response = client.simulate_get("/")
+
+    event, = events
+    assert response.json == {"message": "Sentry error: %s" % event["event_id"]}
+
+
+def test_error_in_errorhandler(sentry_init, capture_events):
+    sentry_init(integrations=[FalconIntegration()])
+
+    app = falcon.API()
+
+    class Resource:
+        def on_get(self, req, resp):
+            raise ValueError()
+
+    app.add_route("/", Resource())
+
+    def http500_handler(ex, req, resp, params):
+        1 / 0
+
+    app.add_error_handler(Exception, http500_handler)
+
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+
+    with pytest.raises(ZeroDivisionError):
+        client.simulate_get("/")
+
+    event, = events
+
+    last_ex_values = event["exception"]["values"][-1]
+    assert last_ex_values["type"] == "ZeroDivisionError"
+    assert last_ex_values["stacktrace"]["frames"][-1]["vars"]["ex"] == "ValueError()"
+
+
+def test_bad_request_not_captured(sentry_init, capture_events):
+    sentry_init(integrations=[FalconIntegration()])
+    events = capture_events()
+
+    app = falcon.API()
+
+    class Resource:
+        def on_get(self, req, resp):
+            raise falcon.HTTPBadRequest()
+
+    app.add_route("/", Resource())
+
+    client = falcon.testing.TestClient(app)
+
+    client.simulate_get("/")
+
+    assert not events
+
+
+def test_does_not_leak_scope(sentry_init, capture_events):
+    sentry_init(integrations=[FalconIntegration()])
+    events = capture_events()
+
+    with sentry_sdk.configure_scope() as scope:
+        scope.set_tag("request_data", False)
+
+    app = falcon.API()
+
+    class Resource:
+        def on_get(self, req, resp):
+            with sentry_sdk.configure_scope() as scope:
+                scope.set_tag("request_data", True)
+
+            def generator():
+                for row in range(1000):
+                    with sentry_sdk.configure_scope() as scope:
+                        assert scope._tags["request_data"]
+
+                    yield (str(row) + "\n").encode()
+
+            resp.stream = generator()
+
+    app.add_route("/", Resource())
+
+    client = falcon.testing.TestClient(app)
+    response = client.simulate_get("/")
+
+    expected_response = "".join(str(row) + "\n" for row in range(1000))
+    assert response.text == expected_response
+    assert not events
+
+    with sentry_sdk.configure_scope() as scope:
+        assert not scope._tags["request_data"]
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
new file mode 100644
index 0000000..dcedf3c
--- /dev/null
+++ b/tests/integrations/flask/test_flask.py
@@ -0,0 +1,623 @@
+import json
+import pytest
+
+from io import BytesIO
+
+flask = pytest.importorskip("flask")
+
+from flask import Flask, Response, request, abort, stream_with_context
+from flask.views import View
+
+from flask_login import LoginManager, login_user
+
+from sentry_sdk import (
+    configure_scope,
+    capture_message,
+    capture_exception,
+    last_event_id,
+)
+from sentry_sdk.integrations.logging import LoggingIntegration
+import sentry_sdk.integrations.flask as flask_sentry
+
+
+login_manager = LoginManager()
+
+
+@pytest.fixture
+def app():
+    app = Flask(__name__)
+    app.config["TESTING"] = True
+    app.secret_key = "haha"
+
+    login_manager.init_app(app)
+
+    @app.route("/message")
+    def hi():
+        capture_message("hi")
+        return "ok"
+
+    return app
+
+
+def test_has_context(sentry_init, app, capture_events):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.get("/message")
+    assert response.status_code == 200
+
+    event, = events
+    assert event["transaction"] == "hi"
+    assert "data" not in event["request"]
+    assert event["request"]["url"] == "http://localhost/message"
+
+
+@pytest.mark.parametrize(
+    "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")]
+)
+def test_transaction_style(
+    sentry_init, app, capture_events, transaction_style, expected_transaction
+):
+    sentry_init(
+        integrations=[
+            flask_sentry.FlaskIntegration(transaction_style=transaction_style)
+        ]
+    )
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.get("/message")
+    assert response.status_code == 200
+
+    event, = events
+    assert event["transaction"] == expected_transaction
+
+
+@pytest.mark.parametrize("debug", (True, False))
+@pytest.mark.parametrize("testing", (True, False))
+def test_errors(sentry_init, capture_exceptions, capture_events, app, debug, testing):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()], debug=True)
+
+    app.debug = debug
+    app.testing = testing
+
+    @app.route("/")
+    def index():
+        1 / 0
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = app.test_client()
+    try:
+        client.get("/")
+    except ZeroDivisionError:
+        pass
+
+    exc, = exceptions
+    assert isinstance(exc, ZeroDivisionError)
+
+    event, = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "flask"
+
+
+def test_flask_login_not_installed(sentry_init, app, capture_events, monkeypatch):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    monkeypatch.setattr(flask_sentry, "flask_login", None)
+
+    events = capture_events()
+
+    client = app.test_client()
+    client.get("/message")
+
+    event, = events
+    assert event.get("user", {}).get("id") is None
+
+
+def test_flask_login_not_configured(sentry_init, app, capture_events, monkeypatch):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    assert flask_sentry.flask_login
+
+    events = capture_events()
+    client = app.test_client()
+    client.get("/message")
+
+    event, = events
+    assert event.get("user", {}).get("id") is None
+
+
+def test_flask_login_partially_configured(
+    sentry_init, app, capture_events, monkeypatch
+):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    events = capture_events()
+
+    login_manager = LoginManager()
+    login_manager.init_app(app)
+
+    client = app.test_client()
+    client.get("/message")
+
+    event, = events
+    assert event.get("user", {}).get("id") is None
+
+
+@pytest.mark.parametrize("send_default_pii", [True, False])
+@pytest.mark.parametrize("user_id", [None, "42", 3])
+def test_flask_login_configured(
+    send_default_pii, sentry_init, app, user_id, capture_events, monkeypatch
+):
+    sentry_init(
+        send_default_pii=send_default_pii,
+        integrations=[flask_sentry.FlaskIntegration()],
+    )
+
+    class User(object):
+        is_authenticated = is_active = True
+        is_anonymous = user_id is not None
+
+        def get_id(self):
+            return str(user_id)
+
+    @login_manager.user_loader
+    def load_user(user_id):
+        if user_id is not None:
+            return User()
+
+    @app.route("/login")
+    def login():
+        if user_id is not None:
+            login_user(User())
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    assert client.get("/login").status_code == 200
+    assert not events
+
+    assert client.get("/message").status_code == 200
+
+    event, = events
+    if user_id is None or not send_default_pii:
+        assert event.get("user", {}).get("id") is None
+    else:
+        assert event["user"]["id"] == str(user_id)
+
+
+def test_flask_large_json_request(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    data = {"foo": {"bar": "a" * 2000}}
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert request.get_json() == data
+        assert request.get_data() == json.dumps(data).encode("ascii")
+        assert not request.form
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response.status_code == 200
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+
+
+@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
+def test_flask_empty_json_request(sentry_init, capture_events, app, data):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert request.get_json() == data
+        assert request.get_data() == json.dumps(data).encode("ascii")
+        assert not request.form
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response.status_code == 200
+
+    event, = events
+    assert event["request"]["data"] == data
+
+
+def test_flask_medium_formdata_request(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    data = {"foo": "a" * 2000}
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert request.form["foo"] == data["foo"]
+        assert not request.get_data()
+        assert not request.get_json()
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", data=data)
+    assert response.status_code == 200
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]) == 512
+
+
+@pytest.mark.parametrize("input_char", [u"a", b"a"])
+def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small")
+
+    data = input_char * 2000
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert not request.form
+        if isinstance(data, bytes):
+            assert request.get_data() == data
+        else:
+            assert request.get_data() == data.encode("ascii")
+        assert not request.get_json()
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", data=data)
+    assert response.status_code == 200
+
+    event, = events
+    assert event["_meta"]["request"]["data"] == {
+        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
+    }
+    assert not event["request"]["data"]
+
+
+def test_flask_files_and_form(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="always")
+
+    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert list(request.form) == ["foo"]
+        assert list(request.files) == ["file"]
+        assert not request.get_json()
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", data=data)
+    assert response.status_code == 200
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]) == 512
+
+    assert event["_meta"]["request"]["data"]["file"] == {
+        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
+    }
+    assert not event["request"]["data"]["file"]
+
+
+@pytest.mark.parametrize(
+    "integrations",
+    [
+        [flask_sentry.FlaskIntegration()],
+        [flask_sentry.FlaskIntegration(), LoggingIntegration(event_level="ERROR")],
+    ],
+)
+def test_errors_not_reported_twice(sentry_init, integrations, capture_events, app):
+    sentry_init(integrations=integrations)
+
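+    # Logging the exception and then re-raising must produce exactly one event,
+    # whether or not the logging integration is explicitly enabled.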
+    @app.route("/")
+    def index():
+        try:
+            1 / 0
+        except Exception as e:
+            app.logger.exception(e)
+            raise e
+
+    events = capture_events()
+
+    client = app.test_client()
+    with pytest.raises(ZeroDivisionError):
+        client.get("/")
+
+    assert len(events) == 1
+
+
+def test_logging(sentry_init, capture_events, app):
+    # ensure that Flask's logger magic doesn't break ours
+    sentry_init(
+        integrations=[
+            flask_sentry.FlaskIntegration(),
+            LoggingIntegration(event_level="ERROR"),
+        ]
+    )
+
+    @app.route("/")
+    def index():
+        app.logger.error("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    client.get("/")
+
+    event, = events
+    assert event["level"] == "error"
+
+
+def test_no_errors_without_request(app, sentry_init):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    with app.app_context():
+        capture_exception(ValueError())
+
+
+def test_cli_commands_raise(app):
+    if not hasattr(app, "cli"):
+        pytest.skip("Too old flask version")
+
+    from flask.cli import ScriptInfo
+
+    @app.cli.command()
+    def foo():
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        app.cli.main(
+            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
+        )
+
+
+def test_wsgi_level_error_is_caught(
+    app, capture_exceptions, capture_events, sentry_init
+):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
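+    # Replace the WSGI callable entirely so the crash happens outside Flask's
+    # own dispatching; the SDK's WSGI middleware must still capture it.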
+    def wsgi_app(environ, start_response):
+        1 / 0
+
+    app.wsgi_app = wsgi_app
+
+    client = app.test_client()
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    with pytest.raises(ZeroDivisionError) as exc:
+        client.get("/")
+
+    error, = exceptions
+
+    assert error is exc.value
+
+    event, = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
+
+
+def test_500(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    app.debug = False
+    app.testing = False
+
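+    # With debug and testing disabled, Flask invokes the 500 handler, which can
+    # embed last_event_id() for the event captured from the crash.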
+    @app.route("/")
+    def index():
+        1 / 0
+
+    @app.errorhandler(500)
+    def error_handler(err):
+        return "Sentry error: %s" % last_event_id()
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.get("/")
+
+    event, = events
+    assert response.data.decode("utf-8") == "Sentry error: %s" % event["event_id"]
+
+
+def test_error_in_errorhandler(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    app.debug = False
+    app.testing = False
+
+    @app.route("/")
+    def index():
+        raise ValueError()
+
+    @app.errorhandler(500)
+    def error_handler(err):
+        1 / 0
+
+    events = capture_events()
+
+    client = app.test_client()
+
+    with pytest.raises(ZeroDivisionError):
+        client.get("/")
+
+    event1, event2 = events
+
+    exception, = event1["exception"]["values"]
+    assert exception["type"] == "ValueError"
+
+    exception = event2["exception"]["values"][-1]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+def test_bad_request_not_captured(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    def index():
+        abort(400)
+
+    client = app.test_client()
+
+    client.get("/")
+
+    assert not events
+
+
+def test_does_not_leak_scope(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    events = capture_events()
+
+    with configure_scope() as scope:
+        scope.set_tag("request_data", False)
+
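+    # The route overwrites the tag inside the request's scope; after the
+    # streamed response is consumed, the outer scope must still see False.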
+    @app.route("/")
+    def index():
+        with configure_scope() as scope:
+            scope.set_tag("request_data", True)
+
+        def generate():
+            for row in range(1000):
+                with configure_scope() as scope:
+                    assert scope._tags["request_data"]
+
+                yield str(row) + "\n"
+
+        return Response(stream_with_context(generate()), mimetype="text/csv")
+
+    client = app.test_client()
+    response = client.get("/")
+    assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000))
+    assert not events
+
+    with configure_scope() as scope:
+        assert not scope._tags["request_data"]
+
+
+def test_scoped_test_client(sentry_init, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    @app.route("/")
+    def index():
+        return "ok"
+
+    with app.test_client() as client:
+        response = client.get("/")
+        assert response.status_code == 200
+
+
+@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
+def test_errorhandler_for_exception_swallows_exception(
+    sentry_init, app, capture_events, exc_cls
+):
+    # In contrast to error handlers for a status code, error
+    # handlers for exceptions can swallow the exception (this is
+    # just how the Flask signal works)
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    def index():
+        1 / 0
+
+    @app.errorhandler(exc_cls)
+    def zerodivision(e):
+        return "ok"
+
+    with app.test_client() as client:
+        response = client.get("/")
+        assert response.status_code == 200
+
+    assert not events
+
+
+def test_tracing_success(sentry_init, capture_events, app):
+    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])
+
+    events = capture_events()
+
+    with app.test_client() as client:
+        response = client.get("/message")
+        assert response.status_code == 200
+
+    message_event, transaction_event = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "hi"
+    assert "status" not in transaction_event["contexts"]["trace"]
+
+    assert message_event["message"] == "hi"
+    assert message_event["transaction"] == "hi"
+
+
+def test_tracing_error(sentry_init, capture_events, app):
+    sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])
+
+    events = capture_events()
+
+    @app.route("/error")
+    def error():
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        with app.test_client() as client:
+            response = client.get("/error")
+            assert response.status_code == 500
+
+    error_event, transaction_event = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "error"
+    assert transaction_event["contexts"]["trace"]["status"] == "failure"
+
+    assert error_event["transaction"] == "error"
+    exception, = error_event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+def test_class_based_views(sentry_init, app, capture_events):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    class HelloClass(View):
+        def dispatch_request(self):
+            capture_message("hi")
+            return "ok"
+
+    app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))
+
+    with app.test_client() as client:
+        response = client.get("/hello-class/")
+        assert response.status_code == 200
+
+    event, = events
+
+    assert event["message"] == "hi"
+    assert event["transaction"] == "hello_class"
diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py
new file mode 100644
index 0000000..c068c40
--- /dev/null
+++ b/tests/integrations/logging/test_logging.py
@@ -0,0 +1,128 @@
+import sys
+
+import pytest
+import logging
+
+from sentry_sdk.integrations.logging import LoggingIntegration
+
+other_logger = logging.getLogger("testfoo")
+logger = logging.getLogger(__name__)
+
+
+@pytest.fixture(autouse=True)
+def reset_level():
+    other_logger.setLevel(logging.DEBUG)
+    logger.setLevel(logging.DEBUG)
+
+
+@pytest.mark.parametrize("logger", [logger, other_logger])
+def test_logging_works_with_many_loggers(sentry_init, capture_events, logger):
+    sentry_init(integrations=[LoggingIntegration(event_level="ERROR")])
+    events = capture_events()
+
+    logger.info("bread")
+    logger.critical("LOL")
+    event, = events
+    assert event["level"] == "fatal"
+    assert not event["logentry"]["params"]
+    assert event["logentry"]["message"] == "LOL"
+    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"])
+
+
+@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]])
+def test_logging_defaults(integrations, sentry_init, capture_events):
+    sentry_init(integrations=integrations)
+    events = capture_events()
+
+    logger.info("bread")
+    logger.critical("LOL")
+    event, = events
+
+    assert event["level"] == "fatal"
+    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"])
+    assert not any(crumb["message"] == "LOL" for crumb in event["breadcrumbs"])
+    assert "threads" not in event
+
+
+def test_logging_extra_data(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
+    logger.info("bread", extra=dict(foo=42))
+    logger.critical("lol", extra=dict(bar=69))
+
+    event, = events
+
+    assert event["level"] == "fatal"
+    assert event["extra"] == {"bar": 69}
+    assert any(
+        crumb["message"] == "bread" and crumb["data"] == {"foo": 42}
+        for crumb in event["breadcrumbs"]
+    )
+
+
+def test_logging_extra_data_integer_keys(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
+    logger.critical("integer in extra keys", extra={1: 1})
+
+    event, = events
+
+    assert event["extra"] == {"1": 1}
+
+
+@pytest.mark.xfail(sys.version_info[:2] == (3, 4), reason="buggy logging module")
+def test_logging_stack(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
+    logger.error("first", exc_info=True)
+    logger.error("second")
+
+    event_with, event_without, = events
+
+    assert event_with["level"] == "error"
+    assert event_with["threads"]["values"][0]["stacktrace"]["frames"]
+
+    assert event_without["level"] == "error"
+    assert "threads" not in event_without
+
+
+def test_logging_level(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
+    logger.setLevel(logging.WARNING)
+    logger.error("hi")
+    event, = events
+    assert event["level"] == "error"
+    assert event["logentry"]["message"] == "hi"
+
+    del events[:]
+
+    logger.setLevel(logging.ERROR)
+    logger.warning("hi")
+    assert not events
+
+
+def test_logging_filters(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
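+    # The integration must respect logging filters: nothing is sent while the
+    # filter rejects records, and the record is captured once it passes.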
+    should_log = False
+
+    class MyFilter(logging.Filter):
+        def filter(self, record):
+            return should_log
+
+    logger.addFilter(MyFilter())
+    logger.error("hi")
+
+    assert not events
+
+    should_log = True
+    logger.error("hi")
+
+    event, = events
+    assert event["logentry"]["message"] == "hi"
diff --git a/tests/integrations/modules/test_modules.py b/tests/integrations/modules/test_modules.py
new file mode 100644
index 0000000..1529afc
--- /dev/null
+++ b/tests/integrations/modules/test_modules.py
@@ -0,0 +1,14 @@
+import sentry_sdk
+
+from sentry_sdk.integrations.modules import ModulesIntegration
+
+
+def test_basic(sentry_init, capture_events):
+    sentry_init(integrations=[ModulesIntegration()])
+    events = capture_events()
+
+    sentry_sdk.capture_exception(ValueError())
+
+    event, = events
+    assert "sentry-sdk" in event["modules"]
+    assert "pytest" in event["modules"]
diff --git a/tests/integrations/pyramid/__init__.py b/tests/integrations/pyramid/__init__.py
new file mode 100644
index 0000000..b63de1d
--- /dev/null
+++ b/tests/integrations/pyramid/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pyramid = pytest.importorskip("pyramid")
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
new file mode 100644
index 0000000..dd2ee3d
--- /dev/null
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -0,0 +1,361 @@
+import json
+import logging
+import pkg_resources
+import pytest
+
+from io import BytesIO
+
+import pyramid.testing
+
+from pyramid.authorization import ACLAuthorizationPolicy
+from pyramid.response import Response
+
+from sentry_sdk import capture_message, add_breadcrumb
+from sentry_sdk.integrations.pyramid import PyramidIntegration
+
+from werkzeug.test import Client
+
+
+PYRAMID_VERSION = tuple(
+    map(int, pkg_resources.get_distribution("pyramid").version.split("."))
+)
+
+
+def hi(request):
+    capture_message("hi")
+    return Response("hi")
+
+
+@pytest.fixture
+def pyramid_config():
+    config = pyramid.testing.setUp()
+    try:
+        config.add_route("hi", "/message")
+        config.add_view(hi, route_name="hi")
+        yield config
+    finally:
+        pyramid.testing.tearDown()
+
+
+@pytest.fixture
+def route(pyramid_config):
+    def inner(url):
+        def wrapper(f):
+            pyramid_config.add_route(f.__name__, url)
+            pyramid_config.add_view(f, route_name=f.__name__)
+            return f
+
+        return wrapper
+
+    return inner
+
+
+@pytest.fixture
+def get_client(pyramid_config):
+    def inner():
+        return Client(pyramid_config.make_wsgi_app())
+
+    return inner
+
+
+def test_view_exceptions(
+    get_client, route, sentry_init, capture_events, capture_exceptions
+):
+    sentry_init(integrations=[PyramidIntegration()])
+    events = capture_events()
+    exceptions = capture_exceptions()
+
+    add_breadcrumb({"message": "hi"})
+
+    @route("/errors")
+    def errors(request):
+        add_breadcrumb({"message": "hi2"})
+        1 / 0
+
+    client = get_client()
+    with pytest.raises(ZeroDivisionError):
+        client.get("/errors")
+
+    error, = exceptions
+    assert isinstance(error, ZeroDivisionError)
+
+    event, = events
+    breadcrumb, = event["breadcrumbs"]
+    assert breadcrumb["message"] == "hi2"
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "pyramid"
+
+
+def test_has_context(route, get_client, sentry_init, capture_events):
+    sentry_init(integrations=[PyramidIntegration()])
+    events = capture_events()
+
+    @route("/message/{msg}")
+    def hi2(request):
+        capture_message(request.matchdict["msg"])
+        return Response("hi")
+
+    client = get_client()
+    client.get("/message/yoo")
+
+    event, = events
+    assert event["message"] == "yoo"
+    assert event["request"] == {
+        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
+        "headers": {"Host": "localhost"},
+        "method": "GET",
+        "query_string": "",
+        "url": "http://localhost/message/yoo",
+    }
+    assert event["transaction"] == "hi2"
+
+
+@pytest.mark.parametrize(
+    "transaction_style,expected_transaction",
+    [("route_name", "hi"), ("route_pattern", "/message")],
+)
+def test_transaction_style(
+    sentry_init, get_client, capture_events, transaction_style, expected_transaction
+):
+    sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])
+
+    events = capture_events()
+    client = get_client()
+    client.get("/message")
+
+    event, = events
+    assert event["transaction"] == expected_transaction
+
+
+def test_large_json_request(sentry_init, capture_events, route, get_client):
+    sentry_init(integrations=[PyramidIntegration()])
+
+    data = {"foo": {"bar": "a" * 2000}}
+
+    @route("/")
+    def index(request):
+        assert request.json == data
+        assert request.text == json.dumps(data)
+        assert not request.POST
+        capture_message("hi")
+        return Response("ok")
+
+    events = capture_events()
+
+    client = get_client()
+    client.post("/", content_type="application/json", data=json.dumps(data))
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+
+
+@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
+def test_empty_json_request(sentry_init, capture_events, route, get_client, data):
+    sentry_init(integrations=[PyramidIntegration()])
+
+    @route("/")
+    def index(request):
+        assert request.json == data
+        assert request.text == json.dumps(data)
+        assert not request.POST
+        capture_message("hi")
+        return Response("ok")
+
+    events = capture_events()
+
+    client = get_client()
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response[1] == "200 OK"
+
+    event, = events
+    assert event["request"]["data"] == data
+
+
+def test_files_and_form(sentry_init, capture_events, route, get_client):
+    sentry_init(integrations=[PyramidIntegration()], request_bodies="always")
+
+    data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
+
+    @route("/")
+    def index(request):
+        capture_message("hi")
+        return Response("ok")
+
+    events = capture_events()
+
+    client = get_client()
+    client.post("/", data=data)
+
+    event, = events
+    assert event["_meta"]["request"]["data"]["foo"] == {
+        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+    }
+    assert len(event["request"]["data"]["foo"]) == 512
+
+    assert event["_meta"]["request"]["data"]["file"] == {
+        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
+    }
+    assert not event["request"]["data"]["file"]
+
+
+def test_bad_request_not_captured(
+    sentry_init, pyramid_config, capture_events, route, get_client
+):
+    import pyramid.httpexceptions as exc
+
+    sentry_init(integrations=[PyramidIntegration()])
+    events = capture_events()
+
+    @route("/")
+    def index(request):
+        raise exc.HTTPBadRequest()
+
+    def errorhandler(exc, request):
+        return Response("bad request")
+
+    pyramid_config.add_view(errorhandler, context=exc.HTTPBadRequest)
+
+    client = get_client()
+    client.get("/")
+
+    assert not events
+
+
+def test_errorhandler_ok(
+    sentry_init, pyramid_config, capture_exceptions, route, get_client
+):
+    sentry_init(integrations=[PyramidIntegration()])
+    errors = capture_exceptions()
+
+    @route("/")
+    def index(request):
+        raise Exception()
+
+    def errorhandler(exc, request):
+        return Response("bad request")
+
+    pyramid_config.add_view(errorhandler, context=Exception)
+
+    client = get_client()
+    client.get("/")
+
+    assert not errors
+
+
+@pytest.mark.skipif(
+    PYRAMID_VERSION < (1, 9),
+    reason="We don't have the right hooks in older Pyramid versions",
+)
+def test_errorhandler_500(
+    sentry_init, pyramid_config, capture_exceptions, route, get_client
+):
+    sentry_init(integrations=[PyramidIntegration()])
+    errors = capture_exceptions()
+
+    @route("/")
+    def index(request):
+        1 / 0
+
+    def errorhandler(exc, request):
+        return Response("bad request", status=500)
+
+    pyramid_config.add_view(errorhandler, context=Exception)
+
+    client = get_client()
+    app_iter, status, headers = client.get("/")
+    assert b"".join(app_iter) == b"bad request"
+    assert status.lower() == "500 internal server error"
+
+    error, = errors
+
+    assert isinstance(error, ZeroDivisionError)
+
+
+def test_error_in_errorhandler(
+    sentry_init, pyramid_config, capture_events, route, get_client
+):
+    sentry_init(integrations=[PyramidIntegration()])
+
+    @route("/")
+    def index(request):
+        raise ValueError()
+
+    def error_handler(err, request):
+        1 / 0
+
+    pyramid_config.add_view(error_handler, context=ValueError)
+
+    events = capture_events()
+
+    client = get_client()
+
+    with pytest.raises(ZeroDivisionError):
+        client.get("/")
+
+    event, = events
+
+    exception = event["exception"]["values"][-1]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+def test_error_in_authenticated_userid(
+    sentry_init, pyramid_config, capture_events, route, get_client
+):
+    from sentry_sdk.integrations.logging import LoggingIntegration
+
+    sentry_init(
+        send_default_pii=True,
+        integrations=[
+            PyramidIntegration(),
+            LoggingIntegration(event_level=logging.ERROR),
+        ],
+    )
+    logger = logging.getLogger("test_pyramid")
+
+    class AuthenticationPolicy(object):
+        def authenticated_userid(self, request):
+            logger.error("failed to identify user")
+
+    pyramid_config.set_authorization_policy(ACLAuthorizationPolicy())
+    pyramid_config.set_authentication_policy(AuthenticationPolicy())
+
+    events = capture_events()
+
+    client = get_client()
+    client.get("/message")
+
+    assert len(events) == 1
+
+
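+# Tween used by test_tween_ok below: it turns exceptions from the "index" route
+# into a plain 400 response, so no error should reach Sentry.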
+def tween_factory(handler, registry):
+    def tween(request):
+        try:
+            response = handler(request)
+        except Exception:
+            mroute = request.matched_route
+            if mroute and mroute.name in ("index",):
+                return Response("bad request", status_code=400)
+        return response
+
+    return tween
+
+
+def test_tween_ok(sentry_init, pyramid_config, capture_exceptions, route, get_client):
+    sentry_init(integrations=[PyramidIntegration()])
+    errors = capture_exceptions()
+
+    @route("/")
+    def index(request):
+        raise Exception()
+
+    pyramid_config.add_tween(
+        "tests.integrations.pyramid.test_pyramid.tween_factory",
+        under=pyramid.tweens.INGRESS,
+    )
+
+    client = get_client()
+    client.get("/")
+
+    assert not errors
diff --git a/tests/integrations/redis/__init__.py b/tests/integrations/redis/__init__.py
new file mode 100644
index 0000000..4752ef1
--- /dev/null
+++ b/tests/integrations/redis/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("redis")
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
new file mode 100644
index 0000000..117fac6
--- /dev/null
+++ b/tests/integrations/redis/test_redis.py
@@ -0,0 +1,25 @@
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.redis import RedisIntegration
+
+from fakeredis import FakeStrictRedis
+
+
+def test_basic(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()])
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+
+    connection.get("foobar")
+    capture_message("hi")
+
+    event, = events
+    crumb, = event["breadcrumbs"]
+
+    assert crumb == {
+        "category": "redis",
+        "message": "GET 'foobar'",
+        "data": {"redis.key": "foobar", "redis.command": "GET"},
+        "timestamp": crumb["timestamp"],
+        "type": "redis",
+    }
diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py
new file mode 100644
index 0000000..deaa8e3
--- /dev/null
+++ b/tests/integrations/requests/test_requests.py
@@ -0,0 +1,26 @@
+import pytest
+
+requests = pytest.importorskip("requests")
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.stdlib import StdlibIntegration
+
+
+def test_crumb_capture(sentry_init, capture_events):
+    sentry_init(integrations=[StdlibIntegration()])
+    events = capture_events()
+
+    response = requests.get("https://httpbin.org/status/418")
+    assert response.status_code == 418
+    capture_message("Testing!")
+
+    event, = events
+    crumb, = event["breadcrumbs"]
+    assert crumb["type"] == "http"
+    assert crumb["category"] == "httplib"
+    assert crumb["data"] == {
+        "url": "https://httpbin.org/status/418",
+        "method": "GET",
+        "status_code": 418,
+        "reason": "I'M A TEAPOT",
+    }
diff --git a/tests/integrations/rq/__init__.py b/tests/integrations/rq/__init__.py
new file mode 100644
index 0000000..d9714d4
--- /dev/null
+++ b/tests/integrations/rq/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+rq = pytest.importorskip("rq")
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
new file mode 100644
index 0000000..60483ac
--- /dev/null
+++ b/tests/integrations/rq/test_rq.py
@@ -0,0 +1,53 @@
+from sentry_sdk.integrations.rq import RqIntegration
+
+from fakeredis import FakeStrictRedis
+import rq
+
+
+def crashing_job(foo):
+    1 / 0
+
+
+def test_basic(sentry_init, capture_events):
+    sentry_init(integrations=[RqIntegration()])
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(crashing_job, foo=42)
+    worker.work(burst=True)
+
+    event, = events
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+    assert exception["mechanism"]["type"] == "rq"
+    assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
+
+    assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert event["extra"]["rq-job"] == {
+        "args": [],
+        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
+        "func": "tests.integrations.rq.test_rq.crashing_job",
+        "job_id": event["extra"]["rq-job"]["job_id"],
+        "kwargs": {"foo": 42},
+    }
+
+
+def test_transport_shutdown(sentry_init, capture_events_forksafe):
+    sentry_init(integrations=[RqIntegration()])
+
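+    # Unlike SimpleWorker, rq.Worker forks a child process per job; the event
+    # must be flushed from the child before it exits.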
+    events = capture_events_forksafe()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.Worker([queue], connection=queue.connection)
+
+    queue.enqueue(crashing_job, foo=42)
+    worker.work(burst=True)
+
+    event = events.read_event()
+    events.read_flush()
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
diff --git a/tests/integrations/sanic/__init__.py b/tests/integrations/sanic/__init__.py
new file mode 100644
index 0000000..53449e2
--- /dev/null
+++ b/tests/integrations/sanic/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+sanic = pytest.importorskip("sanic")
diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py
new file mode 100644
index 0000000..cd6f2be
--- /dev/null
+++ b/tests/integrations/sanic/test_sanic.py
@@ -0,0 +1,171 @@
+import sys
+
+import random
+import asyncio
+
+import pytest
+
+from sentry_sdk import capture_message, configure_scope
+from sentry_sdk.integrations.sanic import SanicIntegration
+
+from sanic import Sanic, request, response
+from sanic.exceptions import abort
+
+
+@pytest.fixture
+def app():
+    app = Sanic(__name__)
+
+    @app.route("/message")
+    def hi(request):
+        capture_message("hi")
+        return response.text("ok")
+
+    return app
+
+
+def test_request_data(sentry_init, app, capture_events):
+    sentry_init(integrations=[SanicIntegration()])
+    events = capture_events()
+
+    request, response = app.test_client.get("/message?foo=bar")
+    assert response.status == 200
+
+    event, = events
+    assert event["transaction"] == "hi"
+    assert event["request"]["env"] == {"REMOTE_ADDR": ""}
+    assert set(event["request"]["headers"]) == {
+        "accept",
+        "accept-encoding",
+        "host",
+        "user-agent",
+    }
+    assert event["request"]["query_string"] == "foo=bar"
+    assert event["request"]["url"].endswith("/message")
+    assert event["request"]["method"] == "GET"
+
+    # Assert that state is not leaked
+    events.clear()
+    capture_message("foo")
+    event, = events
+
+    assert "request" not in event
+    assert "transaction" not in event
+
+
+def test_errors(sentry_init, app, capture_events):
+    sentry_init(integrations=[SanicIntegration()])
+    events = capture_events()
+
+    @app.route("/error")
+    def myerror(request):
+        raise ValueError("oh no")
+
+    request, response = app.test_client.get("/error")
+    assert response.status == 500
+
+    event, = events
+    assert event["transaction"] == "myerror"
+    exception, = event["exception"]["values"]
+
+    assert exception["type"] == "ValueError"
+    assert exception["value"] == "oh no"
+    assert any(
+        frame["filename"].endswith("test_sanic.py")
+        for frame in exception["stacktrace"]["frames"]
+    )
+
+
+def test_bad_request_not_captured(sentry_init, app, capture_events):
+    sentry_init(integrations=[SanicIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    def index(request):
+        abort(400)
+
+    request, response = app.test_client.get("/")
+    assert response.status == 400
+
+    assert not events
+
+
+def test_error_in_errorhandler(sentry_init, app, capture_events):
+    sentry_init(integrations=[SanicIntegration()])
+    events = capture_events()
+
+    @app.route("/error")
+    def myerror(request):
+        raise ValueError("oh no")
+
+    @app.exception(ValueError)
+    def myhandler(request, exception):
+        1 / 0
+
+    request, response = app.test_client.get("/error")
+    assert response.status == 500
+
+    event1, event2 = events
+
+    exception, = event1["exception"]["values"]
+    assert exception["type"] == "ValueError"
+    assert any(
+        frame["filename"].endswith("test_sanic.py")
+        for frame in exception["stacktrace"]["frames"]
+    )
+
+    exception = event2["exception"]["values"][-1]
+    assert exception["type"] == "ZeroDivisionError"
+    assert any(
+        frame["filename"].endswith("test_sanic.py")
+        for frame in exception["stacktrace"]["frames"]
+    )
+
+
+def test_concurrency(sentry_init, app):
+    sentry_init(integrations=[SanicIntegration()])
+
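+    # Run many requests concurrently; each one tags the scope with its own index
+    # and must still see that exact value after awaiting, i.e. scopes must not
+    # bleed between tasks.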
+    @app.route("/context-check/")
+    async def context_check(request, i):
+        with configure_scope() as scope:
+            scope.set_tag("i", i)
+
+        await asyncio.sleep(random.random())
+
+        with configure_scope() as scope:
+            assert scope._tags["i"] == i
+
+        return response.text("ok")
+
+    async def task(i):
+        responses = []
+
+        await app.handle_request(
+            request.Request(
+                url_bytes="http://localhost/context-check/{i}".format(i=i).encode(
+                    "ascii"
+                ),
+                headers={},
+                version="1.1",
+                method="GET",
+                transport=None,
+            ),
+            write_callback=responses.append,
+            stream_callback=responses.append,
+        )
+
+        r, = responses
+        assert r.status == 200
+
+    async def runner():
+        await asyncio.gather(*(task(i) for i in range(1000)))
+
+    if sys.version_info < (3, 7):
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        loop.run_until_complete(runner())
+    else:
+        asyncio.run(runner())
+
+    with configure_scope() as scope:
+        assert not scope._tags
diff --git a/tests/integrations/serverless/test_serverless.py b/tests/integrations/serverless/test_serverless.py
new file mode 100644
index 0000000..56982bc
--- /dev/null
+++ b/tests/integrations/serverless/test_serverless.py
@@ -0,0 +1,46 @@
+import pytest
+
+from sentry_sdk.integrations.serverless import serverless_function
+
+
+def test_basic(sentry_init, capture_exceptions, monkeypatch):
+    sentry_init()
+    exceptions = capture_exceptions()
+
+    flush_calls = []
+
+    @serverless_function
+    def foo():
+        monkeypatch.setattr(
+            "sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1)
+        )
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        foo()
+
+    exception, = exceptions
+    assert isinstance(exception, ZeroDivisionError)
+
+    assert flush_calls == [1]
+
+
+def test_flush_disabled(sentry_init, capture_exceptions, monkeypatch):
+    sentry_init()
+    exceptions = capture_exceptions()
+
+    flush_calls = []
+
+    monkeypatch.setattr("sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1))
+
+    @serverless_function(flush=False)
+    def foo():
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        foo()
+
+    exception, = exceptions
+    assert isinstance(exception, ZeroDivisionError)
+
+    assert flush_calls == []
diff --git a/tests/integrations/sqlalchemy/__init__.py b/tests/integrations/sqlalchemy/__init__.py
new file mode 100644
index 0000000..b430bf6
--- /dev/null
+++ b/tests/integrations/sqlalchemy/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("sqlalchemy")
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
new file mode 100644
index 0000000..e918f95
--- /dev/null
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -0,0 +1,65 @@
+from sqlalchemy import Column, ForeignKey, Integer, String
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship, sessionmaker
+from sqlalchemy import create_engine
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
+
+
+def test_orm_queries(sentry_init, capture_events):
+    sentry_init(
+        integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True}
+    )
+    events = capture_events()
+
+    Base = declarative_base()
+
+    class Person(Base):
+        __tablename__ = "person"
+        id = Column(Integer, primary_key=True)
+        name = Column(String(250), nullable=False)
+
+    class Address(Base):
+        __tablename__ = "address"
+        id = Column(Integer, primary_key=True)
+        street_name = Column(String(250))
+        street_number = Column(String(250))
+        post_code = Column(String(250), nullable=False)
+        person_id = Column(Integer, ForeignKey("person.id"))
+        person = relationship(Person)
+
+    engine = create_engine("sqlite:///:memory:")
+    Base.metadata.create_all(engine)
+
+    Session = sessionmaker(bind=engine)
+    session = Session()
+
+    bob = Person(name="Bob")
+    session.add(bob)
+
+    assert session.query(Person).first() == bob
+
+    capture_message("hi")
+
+    event, = events
+
+    for crumb in event["breadcrumbs"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"][-2:] == [
+        {
+            "category": "query",
+            "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
+            "message": "INSERT INTO person (name) VALUES (?)",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {"db.params": [1, 0], "db.paramstyle": "qmark"},
+            "message": "SELECT person.id AS person_id, person.name AS person_name \n"
+            "FROM person\n"
+            " LIMIT ? OFFSET ?",
+            "type": "default",
+        },
+    ]
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
new file mode 100644
index 0000000..53d49ea
--- /dev/null
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -0,0 +1,111 @@
+import platform
+import sys
+
+import pytest
+
+try:
+    from urllib.request import urlopen
+except ImportError:
+    from urllib import urlopen
+
+try:
+    from httplib import HTTPSConnection
+except ImportError:
+    from http.client import HTTPSConnection
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.stdlib import StdlibIntegration
+
+
+def test_crumb_capture(sentry_init, capture_events):
+    sentry_init(integrations=[StdlibIntegration()])
+    events = capture_events()
+
+    url = "https://httpbin.org/status/200"
+    response = urlopen(url)
+    assert response.getcode() == 200
+    capture_message("Testing!")
+
+    event, = events
+    crumb, = event["breadcrumbs"]
+    assert crumb["type"] == "http"
+    assert crumb["category"] == "httplib"
+    assert crumb["data"] == {
+        "url": url,
+        "method": "GET",
+        "status_code": 200,
+        "reason": "OK",
+    }
+
+
+def test_crumb_capture_hint(sentry_init, capture_events):
+    def before_breadcrumb(crumb, hint):
+        if "httplib_response" in hint:
+            con = hint["httplib_response"].getheader("Connection")
+            assert con.lower() == "close"
+            crumb["data"]["extra"] = "foo"
+        return crumb
+
+    sentry_init(integrations=[StdlibIntegration()], before_breadcrumb=before_breadcrumb)
+    events = capture_events()
+
+    url = "https://httpbin.org/status/200"
+    response = urlopen(url)
+    assert response.getcode() == 200
+    capture_message("Testing!")
+
+    event, = events
+    crumb, = event["breadcrumbs"]
+    assert crumb["type"] == "http"
+    assert crumb["category"] == "httplib"
+    assert crumb["data"] == {
+        "url": url,
+        "method": "GET",
+        "status_code": 200,
+        "reason": "OK",
+        "extra": "foo",
+    }
+
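+    # The SDK must not hold on to the response: the only references left are the
+    # local variable and the temporary getrefcount argument.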
+    if platform.python_implementation() != "PyPy":
+        assert sys.getrefcount(response) == 2
+
+
+def test_httplib_misuse(sentry_init, capture_events):
+    """HTTPConnection.getresponse must be called after every call to
+    HTTPConnection.request. However, if somebody does not abide by
+    this contract, we still should handle this gracefully and not
+    send mixed breadcrumbs.
+
+    Test whether our breadcrumbs are coherent when somebody uses HTTPConnection
+    wrongly.
+    """
+
+    sentry_init()
+    events = capture_events()
+
+    conn = HTTPSConnection("httpbin.org", 443)
+    conn.request("GET", "/anything/foo")
+
+    with pytest.raises(Exception):
+        # This raises an exception, because we didn't call `getresponse` for
+        # the previous request yet.
+        #
+        # This call should not affect our breadcrumb.
+        conn.request("POST", "/anything/bar")
+
+    response = conn.getresponse()
+    assert response._method == "GET"
+
+    capture_message("Testing!")
+
+    event, = events
+    crumb, = event["breadcrumbs"]
+
+    assert crumb["type"] == "http"
+    assert crumb["category"] == "httplib"
+    assert crumb["data"] == {
+        "url": "https://httpbin.org/anything/foo",
+        "method": "GET",
+        "status_code": 200,
+        "reason": "OK",
+    }
diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py
new file mode 100644
index 0000000..1fd3d13
--- /dev/null
+++ b/tests/integrations/stdlib/test_subprocess.py
@@ -0,0 +1,183 @@
+import os
+import platform
+import subprocess
+import sys
+
+import pytest
+
+from sentry_sdk import Hub, capture_message
+from sentry_sdk._compat import PY2
+from sentry_sdk.integrations.stdlib import StdlibIntegration
+
+
+if PY2:
+    from collections import Mapping
+else:
+    from collections.abc import Mapping
+
+
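+# Read-only mapping passed as env= to verify the integration does not mutate
+# the caller's environment mapping in place.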
+class ImmutableDict(Mapping):
+    def __init__(self, inner):
+        self.inner = inner
+
+    def __getitem__(self, key):
+        return self.inner[key]
+
+    def __iter__(self):
+        return iter(self.inner)
+
+    def __len__(self):
+        return len(self.inner)
+
+
+@pytest.mark.parametrize("positional_args", [True, False])
+@pytest.mark.parametrize(
+    "iterator",
+    [
+        pytest.param(
+            True,
+            marks=pytest.mark.skipif(
+                platform.python_implementation() == "PyPy",
+                reason="https://bitbucket.org/pypy/pypy/issues/3050/subprocesspopen-only-accepts-sequences",
+            ),
+        ),
+        False,
+    ],
+    ids=("as_iterator", "as_list"),
+)
+@pytest.mark.parametrize("env_mapping", [None, os.environ, ImmutableDict(os.environ)])
+@pytest.mark.parametrize("with_cwd", [True, False])
+def test_subprocess_basic(
+    sentry_init,
+    capture_events,
+    monkeypatch,
+    positional_args,
+    iterator,
+    env_mapping,
+    with_cwd,
+):
+    monkeypatch.setenv("FOO", "bar")
+
+    old_environ = dict(os.environ)
+
+    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    with Hub.current.start_span(transaction="foo", op="foo") as span:
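+        # The child process asserts it inherited FOO from this process and prints
+        # the trace headers the stdlib integration injected into its environment.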
+        args = [
+            sys.executable,
+            "-c",
+            "import os; "
+            "import sentry_sdk; "
+            "from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers; "
+            "sentry_sdk.init(); "
+            "assert os.environ['FOO'] == 'bar'; "
+            "print(dict(get_subprocess_traceparent_headers()))",
+        ]
+
+        if iterator:
+            args = iter(args)
+
+        if positional_args:
+            a = (
+                args,
+                0,  # bufsize
+                None,  # executable
+                None,  # stdin
+                subprocess.PIPE,  # stdout
+                None,  # stderr
+                None,  # preexec_fn
+                False,  # close_fds
+                False,  # shell
+                os.getcwd() if with_cwd else None,  # cwd
+            )
+
+            if env_mapping is not None:
+                a += (env_mapping,)
+
+            popen = subprocess.Popen(*a)
+
+        else:
+            kw = {"args": args, "stdout": subprocess.PIPE}
+
+            if with_cwd:
+                kw["cwd"] = os.getcwd()
+
+            if env_mapping is not None:
+                kw["env"] = env_mapping
+
+            popen = subprocess.Popen(**kw)
+
+        output, unused_err = popen.communicate()
+        retcode = popen.poll()
+        assert not retcode
+
+    assert os.environ == old_environ
+
+    assert span.trace_id in str(output)
+
+    capture_message("hi")
+
+    transaction_event, message_event, = events
+
+    assert message_event["message"] == "hi"
+
+    data = {"subprocess.cwd": os.getcwd()} if with_cwd else {}
+    crumb, = message_event["breadcrumbs"]
+    assert crumb == {
+        "category": "subprocess",
+        "data": data,
+        "message": crumb["message"],
+        "timestamp": crumb["timestamp"],
+        "type": "subprocess",
+    }
+
+    if not iterator:
+        assert crumb["message"].startswith(sys.executable + " ")
+
+    assert transaction_event["type"] == "transaction"
+
+    subprocess_init_span, subprocess_wait_span, subprocess_communicate_span = transaction_event[
+        "spans"
+    ]
+
+    assert subprocess_init_span["op"] == "subprocess"
+    assert subprocess_communicate_span["op"] == "subprocess.communicate"
+    assert subprocess_wait_span["op"] == "subprocess.wait"
+
+    # span hierarchy
+    assert (
+        subprocess_wait_span["parent_span_id"] == subprocess_communicate_span["span_id"]
+    )
+    assert (
+        subprocess_communicate_span["parent_span_id"]
+        == subprocess_init_span["parent_span_id"]
+        == transaction_event["contexts"]["trace"]["span_id"]
+    )
+
+    # common data
+    assert (
+        subprocess_init_span["tags"]["subprocess.pid"]
+        == subprocess_wait_span["tags"]["subprocess.pid"]
+        == subprocess_communicate_span["tags"]["subprocess.pid"]
+    )
+
+    # data of init span
+    assert subprocess_init_span["data"] == data
+    if iterator:
+        assert "iterator" in subprocess_init_span["description"]
+        assert subprocess_init_span["description"].startswith("<")
+    else:
+        assert sys.executable + " -c" in subprocess_init_span["description"]
+
+
+def test_subprocess_invalid_args(sentry_init):
+    sentry_init(integrations=[StdlibIntegration()])
+
+    with pytest.raises(TypeError) as excinfo:
+        subprocess.Popen()
+
+    if PY2:
+        assert "__init__() takes at least 2 arguments (1 given)" in str(excinfo.value)
+    else:
+        assert "missing 1 required positional argument: 'args" in str(excinfo.value)
diff --git a/tests/integrations/test_gnu_backtrace.py b/tests/integrations/test_gnu_backtrace.py
new file mode 100644
index 0000000..28614fb
--- /dev/null
+++ b/tests/integrations/test_gnu_backtrace.py
@@ -0,0 +1,102 @@
+import pytest
+
+from sentry_sdk import capture_exception
+from sentry_sdk.integrations.gnu_backtrace import GnuBacktraceIntegration
+
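+# Assorted clickhouse-server GNU backtrace lines; each one is raised as an
+# exception message and should be converted into a native stack frame.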
+LINES = r"""
+0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99d31a6]
+1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string, std::allocator > const&, int)+0x22) [0x372c822]
+10. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1a12) [0x6ae45d2]
+10. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x11af) [0x75c68ff]
+10. clickhouse-server(ThreadPoolImpl::worker(std::_List_iterator)+0x1ab) [0x6f90c1b]
+11. clickhouse-server() [0xae06ddf]
+11. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
+11. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::shared_ptr const&, std::shared_ptr const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75c7516]
+12. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7f3bbc568184]
+12. clickhouse-server(DB::ExpressionAnalyzer::getConstActions()+0xc9) [0x6a0b059]
+12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75c8276]
+13. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7f3bbbb8303d]
+13. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr const&, DB::Context const&, std::vector, std::allocator >, std::allocator, std::allocator > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75d4067]
+13. clickhouse-server(DB::evaluateConstantExpression(std::shared_ptr const&, DB::Context const&)+0x3ed) [0x656bfdd]
+14. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75b0298]
+14. clickhouse-server(DB::makeExplicitSet(DB::ASTFunction const*, DB::Block const&, bool, DB::Context const&, DB::SizeLimits const&, std::unordered_map, DB::PreparedSetKey::Hash, std::equal_to, std::allocator > > >&)+0x382) [0x6adf692]
+15. clickhouse-server() [0x7664c79]
+15. clickhouse-server(DB::ActionsVisitor::makeSet(DB::ASTFunction const*, DB::Block const&)+0x2a7) [0x6ae2227]
+16. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1973) [0x6ae4533]
+16. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum)+0x8a) [0x76669fa]
+17. clickhouse-server(DB::ActionsVisitor::visit(std::shared_ptr const&)+0x1324) [0x6ae3ee4]
+17. clickhouse-server(DB::TCPHandler::runImpl()+0x4b9) [0x30973c9]
+18. clickhouse-server(DB::ExpressionAnalyzer::getRootActions(std::shared_ptr const&, bool, std::shared_ptr&, bool)+0xdb) [0x6a0a63b]
+18. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x30985ab]
+19. clickhouse-server(DB::ExpressionAnalyzer::appendGroupBy(DB::ExpressionActionsChain&, bool)+0x100) [0x6a0b4f0]
+19. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x9b53e4f]
+2. clickhouse-server(DB::FunctionTuple::getReturnTypeImpl(std::vector, std::allocator > > const&) const+0x122) [0x3a2a0f2]
+2. clickhouse-server(DB::readException(DB::Exception&, DB::ReadBuffer&, std::__cxx11::basic_string, std::allocator > const&)+0x21f) [0x6fb253f]
+2. clickhouse-server(void DB::readDateTimeTextFallback(long&, DB::ReadBuffer&, DateLUTImpl const&)+0x318) [0x99ffed8]
+20. clickhouse-server(DB::InterpreterSelectQuery::analyzeExpressions(DB::QueryProcessingStage::Enum, bool)+0x364) [0x6437fa4]
+20. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0x16a) [0x9b5422a]
+21. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr const&, bool)+0x36d) [0x643c28d]
+21. clickhouse-server(Poco::PooledThread::run()+0x77) [0x9c70f37]
+22. clickhouse-server(DB::InterpreterSelectQuery::executeWithMultipleStreams()+0x50) [0x643ecd0]
+22. clickhouse-server(Poco::ThreadImpl::runnableEntry(void*)+0x38) [0x9c6caa8]
+23. clickhouse-server() [0xa3c68cf]
+23. clickhouse-server(DB::InterpreterSelectWithUnionQuery::executeWithMultipleStreams()+0x6c) [0x644805c]
+24. /lib/x86_64-linux-gnu/libpthread.so.0(+0x8184) [0x7fe839d2d184]
+24. clickhouse-server(DB::InterpreterSelectWithUnionQuery::execute()+0x38) [0x6448658]
+25. /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7fe83934803d]
+25. clickhouse-server() [0x65744ef]
+26. clickhouse-server(DB::executeQuery(std::__cxx11::basic_string, std::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum, bool)+0x81) [0x6576141]
+27. clickhouse-server(DB::TCPHandler::runImpl()+0x752) [0x3739f82]
+28. clickhouse-server(DB::TCPHandler::run()+0x2b) [0x373a5cb]
+29. clickhouse-server(Poco::Net::TCPServerConnection::start()+0xf) [0x708e63f]
+3. clickhouse-server(DB::Connection::receiveException()+0x81) [0x67d3ad1]
+3. clickhouse-server(DB::DefaultFunctionBuilder::getReturnTypeImpl(std::vector > const&) const+0x223) [0x38ac3b3]
+3. clickhouse-server(DB::FunctionComparison::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr const&, std::shared_ptr const&, bool, unsigned long)+0xbb3) [0x411dee3]
+30. clickhouse-server(Poco::Net::TCPServerDispatcher::run()+0xe9) [0x708ed79]
+31. clickhouse-server(Poco::PooledThread::run()+0x81) [0x7142011]
+4. clickhouse-server(DB::Connection::receivePacket()+0x767) [0x67d9cd7]
+4. clickhouse-server(DB::FunctionBuilderImpl::getReturnTypeWithoutLowCardinality(std::vector > const&) const+0x75) [0x6869635]
+4. clickhouse-server(DB::FunctionComparison::executeImpl(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x576) [0x41ab006]
+5. clickhouse-server(DB::FunctionBuilderImpl::getReturnType(std::vector > const&) const+0x350) [0x6869f10]
+5. clickhouse-server(DB::MultiplexedConnections::receivePacket()+0x7e) [0x67e7ede]
+5. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector > const&, unsigned long, unsigned long)+0x3e2) [0x7933492]
+6. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map, std::allocator >, unsigned long, std::hash, std::allocator > >, std::equal_to, std::allocator > >, std::allocator, std::allocator > const, unsigned long> > >&) const+0x61a) [0x7ae093a]
+6. clickhouse-server(DB::FunctionBuilderImpl::build(std::vector > const&) const+0x3c) [0x38accfc]
+6. clickhouse-server(DB::RemoteBlockInputStream::readImpl()+0x87) [0x631da97]
+7. clickhouse-server(DB::ExpressionActions::addImpl(DB::ExpressionAction, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x552) [0x6a00052]
+7. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7ae1e06]
+7. clickhouse-server(DB::IBlockInputStream::read()+0x178) [0x63075e8]
+8. clickhouse-server(DB::ExpressionActions::add(DB::ExpressionAction const&, std::vector, std::allocator >, std::allocator, std::allocator > > >&)+0x42) [0x6a00422]
+8. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr const&, std::shared_ptr const&, std::__cxx11::basic_string, std::allocator > const&, bool)+0x711) [0x79970d1]
+8. clickhouse-server(DB::ParallelInputsProcessor::thread(std::shared_ptr, unsigned long)+0x2f1) [0x64467c1]
+9. clickhouse-server() [0x75bd5a3]
+9. clickhouse-server(DB::ScopeStack::addAction(DB::ExpressionAction const&)+0xd2) [0x6ae04d2]
+9. clickhouse-server(ThreadFromGlobalPool::ThreadFromGlobalPool::process()::{lambda()#1}>(DB::ParallelInputsProcessor::process()::{lambda()#1}&&)::{lambda()#1}::operator()() const+0x6d) [0x644722d]
+"""
+
+
+@pytest.mark.parametrize("input", LINES.strip().splitlines())
+def test_basic(sentry_init, capture_events, input):
+    sentry_init(integrations=[GnuBacktraceIntegration()])
+    events = capture_events()
+
+    try:
+        raise ValueError(input)
+    except ValueError:
+        capture_exception()
+
+    event, = events
+    exception, = event["exception"]["values"]
+
+    assert (
+        exception["value"]
+        == ""
+    )
+    frame, = exception["stacktrace"]["frames"][1:]
+
+    if "function" not in frame:
+        assert "clickhouse-server()" in input or "pthread" in input
+    else:
+        assert frame["function"]
+        assert ")" not in frame["function"] and "(" not in frame["function"]
+        assert frame["function"] in input
diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py
new file mode 100644
index 0000000..14a189a
--- /dev/null
+++ b/tests/integrations/threading/test_threading.py
@@ -0,0 +1,108 @@
+import gc
+
+from threading import Thread
+
+import pytest
+
+from sentry_sdk import configure_scope
+from sentry_sdk.integrations.threading import ThreadingIntegration
+
+
+@pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
+def test_handles_exceptions(sentry_init, capture_events, integrations):
+    sentry_init(default_integrations=False, integrations=integrations)
+    events = capture_events()
+
+    def crash():
+        1 / 0
+
+    t = Thread(target=crash)
+    t.start()
+    t.join()
+
+    if integrations:
+        event, = events
+
+        exception, = event["exception"]["values"]
+        assert exception["type"] == "ZeroDivisionError"
+        assert exception["mechanism"] == {"type": "threading", "handled": False}
+    else:
+        assert not events
+
+
+@pytest.mark.parametrize("propagate_hub", (True, False))
+def test_propagates_hub(sentry_init, capture_events, propagate_hub):
+    sentry_init(
+        default_integrations=False,
+        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
+    )
+    events = capture_events()
+
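+    # stage1 tags the scope in its own thread and spawns stage2 in a nested
+    # thread; the tag should only show up on the crash event when the hub is
+    # propagated.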
+    def stage1():
+        with configure_scope() as scope:
+            scope.set_tag("stage1", True)
+
+        t = Thread(target=stage2)
+        t.start()
+        t.join()
+
+    def stage2():
+        1 / 0
+
+    t = Thread(target=stage1)
+    t.start()
+    t.join()
+
+    event, = events
+
+    exception, = event["exception"]["values"]
+
+    assert exception["type"] == "ZeroDivisionError"
+    assert exception["mechanism"] == {"type": "threading", "handled": False}
+
+    if propagate_hub:
+        assert event["tags"]["stage1"] is True
+    else:
+        assert "stage1" not in event.get("tags", {})
+
+
+def test_circular_references(sentry_init, request):
+    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
+
+    gc.collect()
+    gc.disable()
+    request.addfinalizer(gc.enable)
+
+    class MyThread(Thread):
+        def run(self):
+            pass
+
+    t = MyThread()
+    t.start()
+    t.join()
+    del t
+
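+    # The patched Thread.run used by the integration must not create reference
+    # cycles; gc.collect() returning 0 means nothing unreachable was left behind.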
+    assert not gc.collect()
+
+
+def test_double_patching(sentry_init, capture_events):
+    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
+    events = capture_events()
+
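+    # The integration wraps Thread.run as each thread starts; every crash must
+    # be reported exactly once per thread, i.e. no duplicate events from
+    # patching the same class twice.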
+    class MyThread(Thread):
+        def run(self):
+            1 / 0
+
+    ts = []
+    for _ in range(10):
+        t = MyThread()
+        t.start()
+        ts.append(t)
+
+    for t in ts:
+        t.join()
+
+    assert len(events) == 10
+    for event in events:
+        exception, = event["exception"]["values"]
+        assert exception["type"] == "ZeroDivisionError"
diff --git a/tests/integrations/tornado/__init__.py b/tests/integrations/tornado/__init__.py
new file mode 100644
index 0000000..a6ccd8a
--- /dev/null
+++ b/tests/integrations/tornado/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+tornado = pytest.importorskip("tornado")
diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py
new file mode 100644
index 0000000..8070947
--- /dev/null
+++ b/tests/integrations/tornado/test_tornado.py
@@ -0,0 +1,192 @@
+import json
+
+import pytest
+
+from sentry_sdk import configure_scope
+from sentry_sdk.integrations.tornado import TornadoIntegration
+
+from tornado.web import RequestHandler, Application, HTTPError
+from tornado.testing import AsyncHTTPTestCase
+
+
+@pytest.fixture
+def tornado_testcase(request):
+    # Take the unittest class provided by tornado and manually call its setUp
+    # and tearDown.
+    #
+    # The pytest plugins for tornado seem too complicated to use, as they for
+    # some reason assume I want to write my tests in async code.
+    def inner(app):
+        class TestBogus(AsyncHTTPTestCase):
+            def get_app(self):
+                return app
+
+            def bogustest(self):
+                # We need to pass a valid test method name to the ctor, so this
+                # is the method. It does nothing.
+                pass
+
+        self = TestBogus("bogustest")
+        self.setUp()
+        request.addfinalizer(self.tearDown)
+        return self
+
+    return inner
+
+
+class CrashingHandler(RequestHandler):
+    def get(self):
+        with configure_scope() as scope:
+            scope.set_tag("foo", 42)
+        1 / 0
+
+
+def test_basic(tornado_testcase, sentry_init, capture_events):
+    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client = tornado_testcase(Application([(r"/hi", CrashingHandler)]))
+
+    response = client.fetch(
+        "/hi?foo=bar", headers={"Cookie": "name=value; name2=value2; name3=value3"}
+    )
+    assert response.code == 500
+
+    event, = events
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+    request = event["request"]
+    host = request["headers"]["Host"]
+    assert event["request"] == {
+        "env": {"REMOTE_ADDR": "127.0.0.1"},
+        "headers": {
+            "Accept-Encoding": "gzip",
+            "Connection": "close",
+            "Host": host,
+            "Cookie": "name=value; name2=value2; name3=value3",
+        },
+        "cookies": {"name": "value", "name2": "value2", "name3": "value3"},
+        "method": "GET",
+        "query_string": "foo=bar",
+        "url": "http://{host}/hi".format(host=host),
+    }
+
+    assert event["tags"] == {"foo": 42}
+    assert (
+        event["transaction"]
+        == "tests.integrations.tornado.test_tornado.CrashingHandler.get"
+    )
+
+    with configure_scope() as scope:
+        assert not scope._tags
+
+
+def test_400_not_logged(tornado_testcase, sentry_init, capture_events):
+    sentry_init(integrations=[TornadoIntegration()])
+    events = capture_events()
+
+    class CrashingHandler(RequestHandler):
+        def get(self):
+            raise HTTPError(400, "Oops")
+
+    client = tornado_testcase(Application([(r"/", CrashingHandler)]))
+
+    response = client.fetch("/")
+    assert response.code == 400
+
+    assert not events
+
+
+def test_user_auth(tornado_testcase, sentry_init, capture_events):
+    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
+    events = capture_events()
+
+    class UserHandler(RequestHandler):
+        def get(self):
+            1 / 0
+
+        def get_current_user(self):
+            return 42
+
+    class NoUserHandler(RequestHandler):
+        def get(self):
+            1 / 0
+
+    client = tornado_testcase(
+        Application([(r"/auth", UserHandler), (r"/noauth", NoUserHandler)])
+    )
+
+    # has user
+    response = client.fetch("/auth")
+    assert response.code == 500
+
+    event, = events
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+    assert event["user"] == {"is_authenticated": True}
+
+    events.clear()
+
+    # has no user
+    response = client.fetch("/noauth")
+    assert response.code == 500
+
+    event, = events
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+    assert "user" not in event
+
+
+def test_formdata(tornado_testcase, sentry_init, capture_events):
+    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
+    events = capture_events()
+
+    class FormdataHandler(RequestHandler):
+        def post(self):
+            raise ValueError(json.dumps(sorted(self.request.body_arguments)))
+
+    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))
+
+    response = client.fetch(
+        "/form?queryarg=1",
+        method="POST",
+        headers={"Content-Type": "application/x-www-form-urlencoded"},
+        body=b"field1=value1&field2=value2",
+    )
+
+    assert response.code == 500
+
+    event, = events
+    exception, = event["exception"]["values"]
+    assert exception["value"] == '["field1", "field2"]'
+    assert event["request"]["data"] == {"field1": ["value1"], "field2": ["value2"]}
+
+
+def test_json(tornado_testcase, sentry_init, capture_events):
+    sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
+    events = capture_events()
+
+    class FormdataHandler(RequestHandler):
+        def post(self):
+            raise ValueError(json.dumps(sorted(self.request.body_arguments)))
+
+    client = tornado_testcase(Application([(r"/form", FormdataHandler)]))
+
+    response = client.fetch(
+        "/form?queryarg=1",
+        method="POST",
+        headers={"Content-Type": "application/json"},
+        body=b"""
+        {"foo": {"bar": 42}}
+        """,
+    )
+
+    assert response.code == 500
+
+    event, = events
+    exception, = event["exception"]["values"]
+    assert exception["value"] == "[]"
+    assert event
+    assert event["request"]["data"] == {"foo": {"bar": 42}}
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
new file mode 100644
index 0000000..8c920f4
--- /dev/null
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -0,0 +1,111 @@
+from werkzeug.test import Client
+import pytest
+
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+
+
+@pytest.fixture
+def crashing_app():
+    def app(environ, start_response):
+        1 / 0
+
+    return app
+
+
+class IterableApp(object):
+    def __init__(self, iterable):
+        self.iterable = iterable
+
+    def __call__(self, environ, start_response):
+        return self.iterable
+
+
+class ExitingIterable(object):
+    def __init__(self, exc_func):
+        self._exc_func = exc_func
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        raise self._exc_func()
+
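+    # Python 2 compatibility: next() delegates to __next__().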
+    def next(self):
+        return type(self).__next__(self)
+
+
+def test_basic(sentry_init, crashing_app, capture_events):
+    sentry_init(send_default_pii=True)
+    app = SentryWsgiMiddleware(crashing_app)
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(ZeroDivisionError):
+        client.get("/")
+
+    event, = events
+
+    assert event["transaction"] == "generic WSGI request"
+
+    assert event["request"] == {
+        "env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
+        "headers": {"Host": "localhost"},
+        "method": "GET",
+        "query_string": "",
+        "url": "http://localhost/",
+    }
+
+
+@pytest.mark.parametrize("zero_code", [0, None])
+def test_systemexit_zero_is_ignored(sentry_init, capture_events, zero_code):
+    sentry_init(send_default_pii=True)
+    iterable = ExitingIterable(lambda: SystemExit(zero_code))
+    app = SentryWsgiMiddleware(IterableApp(iterable))
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(SystemExit):
+        client.get("/")
+
+    assert len(events) == 0
+
+
+@pytest.mark.parametrize("nonzero_code", ["", "foo", 1, 2])
+def test_systemexit_nonzero_is_captured(sentry_init, capture_events, nonzero_code):
+    sentry_init(send_default_pii=True)
+    iterable = ExitingIterable(lambda: SystemExit(nonzero_code))
+    app = SentryWsgiMiddleware(IterableApp(iterable))
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(SystemExit):
+        client.get("/")
+
+    event, = events
+
+    assert "exception" in event
+    exc = event["exception"]["values"][-1]
+    assert exc["type"] == "SystemExit"
+    assert exc["value"] == nonzero_code
+    assert event["level"] == "error"
+
+
+def test_keyboard_interrupt_is_captured(sentry_init, capture_events):
+    sentry_init(send_default_pii=True)
+    iterable = ExitingIterable(lambda: KeyboardInterrupt())
+    app = SentryWsgiMiddleware(IterableApp(iterable))
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(KeyboardInterrupt):
+        client.get("/")
+
+    event, = events
+
+    assert "exception" in event
+    exc = event["exception"]["values"][-1]
+    assert exc["type"] == "KeyboardInterrupt"
+    assert exc["value"] == ""
+    assert event["level"] == "error"
diff --git a/tests/test_basics.py b/tests/test_basics.py
new file mode 100644
index 0000000..1d5a69b
--- /dev/null
+++ b/tests/test_basics.py
@@ -0,0 +1,303 @@
+import logging
+
+import pytest
+
+from sentry_sdk import (
+    Client,
+    push_scope,
+    configure_scope,
+    capture_exception,
+    capture_message,
+    add_breadcrumb,
+    last_event_id,
+    Hub,
+)
+from sentry_sdk.integrations.logging import LoggingIntegration
+
+
+def test_processors(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    with configure_scope() as scope:
+
+        def error_processor(event, exc_info):
+            event["exception"]["values"][0]["value"] += " whatever"
+            return event
+
+        scope.add_error_processor(error_processor, ValueError)
+
+    try:
+        raise ValueError("aha!")
+    except Exception:
+        capture_exception()
+
+    event, = events
+
+    assert event["exception"]["values"][0]["value"] == "aha! whatever"
+
+
+def test_event_id(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        raise ValueError("aha!")
+    except Exception:
+        event_id = capture_exception()
+        int(event_id, 16)
+        assert len(event_id) == 32
+
+    event, = events
+    assert event["event_id"] == event_id
+    assert last_event_id() == event_id
+    assert Hub.current.last_event_id() == event_id
+
+
+def test_option_callback(sentry_init, capture_events):
+    drop_events = False
+    drop_breadcrumbs = False
+
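+    # before_send / before_breadcrumb may modify and return the event or crumb;
+    # returning None (once the drop_* flags are flipped) discards it entirely.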
+    def before_send(event, hint):
+        assert isinstance(hint["exc_info"][1], ValueError)
+        if not drop_events:
+            event["extra"] = {"foo": "bar"}
+            return event
+
+    def before_breadcrumb(crumb, hint):
+        assert hint == {"foo": 42}
+        if not drop_breadcrumbs:
+            crumb["data"] = {"foo": "bar"}
+            return crumb
+
+    sentry_init(before_send=before_send, before_breadcrumb=before_breadcrumb)
+    events = capture_events()
+
+    def do_this():
+        add_breadcrumb(message="Hello", hint={"foo": 42})
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    do_this()
+    drop_breadcrumbs = True
+    do_this()
+    drop_events = True
+    do_this()
+
+    normal, no_crumbs = events
+
+    assert normal["exception"]["values"][0]["type"] == "ValueError"
+    crumb, = normal["breadcrumbs"]
+    assert "timestamp" in crumb
+    assert crumb["message"] == "Hello"
+    assert crumb["data"] == {"foo": "bar"}
+    assert crumb["type"] == "default"
+
+
+def test_breadcrumb_arguments(sentry_init, capture_events):
+    assert_hint = {"bar": 42}
+
+    def before_breadcrumb(crumb, hint):
+        assert crumb["foo"] == 42
+        assert hint == assert_hint
+
+    sentry_init(before_breadcrumb=before_breadcrumb)
+
+    add_breadcrumb(foo=42, hint=dict(bar=42))
+    add_breadcrumb(dict(foo=42), dict(bar=42))
+    add_breadcrumb(dict(foo=42), hint=dict(bar=42))
+    add_breadcrumb(crumb=dict(foo=42), hint=dict(bar=42))
+
+    assert_hint.clear()
+    add_breadcrumb(foo=42)
+    add_breadcrumb(crumb=dict(foo=42))
+
+
+def test_push_scope(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    with push_scope() as scope:
+        scope.level = "warning"
+        try:
+            1 / 0
+        except Exception as e:
+            capture_exception(e)
+
+    event, = events
+
+    assert event["level"] == "warning"
+    assert "exception" in event
+
+
+def test_push_scope_null_client(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    Hub.current.bind_client(None)
+
+    with push_scope() as scope:
+        scope.level = "warning"
+        try:
+            1 / 0
+        except Exception as e:
+            capture_exception(e)
+
+    assert len(events) == 0
+
+
+@pytest.mark.parametrize("null_client", (True, False))
+def test_push_scope_callback(sentry_init, null_client, capture_events):
+    sentry_init()
+
+    if null_client:
+        Hub.current.bind_client(None)
+
+    outer_scope = Hub.current._stack[-1][1]
+
+    calls = []
+
+    @push_scope
+    def _(scope):
+        assert scope is Hub.current._stack[-1][1]
+        assert scope is not outer_scope
+        calls.append(1)
+
+    # push_scope always needs to execute the callback regardless of
+    # client state, because that actually runs usercode in it, not
+    # just scope config code
+    assert calls == [1]
+
+    # Assert scope gets popped correctly
+    assert Hub.current._stack[-1][1] is outer_scope
+
+
+def test_breadcrumbs(sentry_init, capture_events):
+    sentry_init(max_breadcrumbs=10)
+    events = capture_events()
+
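+    # Only the most recent max_breadcrumbs (here 10) breadcrumbs are kept, so
+    # users 10-19 survive below while users 0-9 are dropped.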
+    for i in range(20):
+        add_breadcrumb(
+            category="auth", message="Authenticated user %s" % i, level="info"
+        )
+
+    capture_exception(ValueError())
+    event, = events
+
+    assert len(event["breadcrumbs"]) == 10
+    assert "user 10" in event["breadcrumbs"][0]["message"]
+    assert "user 19" in event["breadcrumbs"][-1]["message"]
+
+    del events[:]
+
+    for i in range(2):
+        add_breadcrumb(
+            category="auth", message="Authenticated user %s" % i, level="info"
+        )
+
+    with configure_scope() as scope:
+        scope.clear()
+
+    capture_exception(ValueError())
+    event, = events
+    assert len(event["breadcrumbs"]) == 0
+
+
+def test_integration_scoping():
+    logger = logging.getLogger("test_basics")
+    events = []
+    logging_integration = LoggingIntegration(event_level=logging.WARNING)
+
+    # This client uses the logging integration
+    client_with_logging = Client(
+        transport=events.append,
+        default_integrations=False,
+        integrations=[logging_integration],
+    )
+    Hub.current.bind_client(client_with_logging)
+    logger.warning("This is a warning")
+
+    # This client does not
+    client_without_logging = Client(transport=events.append, default_integrations=False)
+    Hub.current.bind_client(client_without_logging)
+    logger.warning("This is not a warning")
+
+    assert len(events) == 1
+
+
+def test_client_initialized_within_scope(sentry_init, caplog):
+    caplog.set_level(logging.WARNING)
+
+    sentry_init(debug=True)
+
+    with push_scope():
+        sentry_init()
+
+    record, = (x for x in caplog.records if x.levelname == "WARNING")
+
+    assert record.msg.startswith("init() called inside of pushed scope.")
+
+
+def test_scope_leaks_cleaned_up(sentry_init, caplog):
+    caplog.set_level(logging.WARNING)
+
+    sentry_init(debug=True)
+
+    old_stack = list(Hub.current._stack)
+
+    with push_scope():
+        push_scope()
+
+    assert Hub.current._stack == old_stack
+
+    record, = (x for x in caplog.records if x.levelname == "WARNING")
+
+    assert record.message.startswith("Leaked 1 scopes:")
+
+
+def test_scope_popped_too_soon(sentry_init, caplog):
+    caplog.set_level(logging.ERROR)
+
+    sentry_init(debug=True)
+
+    old_stack = list(Hub.current._stack)
+
+    with push_scope():
+        Hub.current.pop_scope_unsafe()
+
+    assert Hub.current._stack == old_stack
+
+    record, = (x for x in caplog.records if x.levelname == "ERROR")
+
+    assert record.message == ("Scope popped too soon. Popped 1 scopes too many.")
+
+
+def test_scope_event_processor_order(sentry_init, capture_events):
+    def before_send(event, hint):
+        event["message"] += "baz"
+        return event
+
+    sentry_init(debug=True, before_send=before_send)
+    events = capture_events()
+
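+    # Event processors run from the outermost pushed scope inwards, with
+    # before_send running last: hence the expected "hifoobarbaz".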
+    with push_scope() as scope:
+
+        @scope.add_event_processor
+        def foo(event, hint):
+            event["message"] += "foo"
+            return event
+
+        with push_scope() as scope:
+
+            @scope.add_event_processor
+            def bar(event, hint):
+                event["message"] += "bar"
+                return event
+
+            capture_message("hi")
+
+    event, = events
+
+    assert event["message"] == "hifoobarbaz"
diff --git a/tests/test_client.py b/tests/test_client.py
new file mode 100644
index 0000000..97960fb
--- /dev/null
+++ b/tests/test_client.py
@@ -0,0 +1,712 @@
+# coding: utf-8
+import json
+import pytest
+import subprocess
+import sys
+import time
+
+from textwrap import dedent
+from sentry_sdk import Hub, Client, configure_scope, capture_message, capture_exception
+from sentry_sdk.hub import HubMeta
+from sentry_sdk.transport import Transport
+from sentry_sdk._compat import reraise, text_type, PY2
+from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
+
+if PY2:
+    # Importing ABCs from collections is deprecated, and will stop working in 3.8
+    # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
+    from collections import Mapping
+else:
+    # New in 3.3
+    # https://docs.python.org/3/library/collections.abc.html
+    from collections.abc import Mapping
+
+
+class EventCaptured(Exception):
+    pass
+
+
+class _TestTransport(Transport):
+    def capture_event(self, event):
+        raise EventCaptured(event)
+
+
+def test_transport_option(monkeypatch):
+    dsn = "https://foo@sentry.io/123"
+    dsn2 = "https://bar@sentry.io/124"
+    assert str(Client(dsn=dsn).dsn) == dsn
+    assert Client().dsn is None
+
+    monkeypatch.setenv("SENTRY_DSN", dsn)
+    transport = Transport({"dsn": dsn2})
+    assert text_type(transport.parsed_dsn) == dsn2
+    assert str(Client(transport=transport).dsn) == dsn
+
+
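+# The proxy tests below exercise precedence: explicit http_proxy/https_proxy
+# arguments win over the HTTP_PROXY/HTTPS_PROXY environment variables, and an
+# explicit empty string disables proxying altogether.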
+def test_proxy_http_use(monkeypatch):
+    client = Client("http://foo@sentry.io/123", http_proxy="http://localhost/123")
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_https_use(monkeypatch):
+    client = Client("https://foo@sentry.io/123", http_proxy="https://localhost/123")
+    assert client.transport._pool.proxy.scheme == "https"
+
+
+def test_proxy_both_select_http(monkeypatch):
+    client = Client(
+        "http://foo@sentry.io/123",
+        https_proxy="https://localhost/123",
+        http_proxy="http://localhost/123",
+    )
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_both_select_https(monkeypatch):
+    client = Client(
+        "https://foo@sentry.io/123",
+        https_proxy="https://localhost/123",
+        http_proxy="http://localhost/123",
+    )
+    assert client.transport._pool.proxy.scheme == "https"
+
+
+def test_proxy_http_fallback_http(monkeypatch):
+    client = Client("https://foo@sentry.io/123", http_proxy="http://localhost/123")
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_none_noenv(monkeypatch):
+    client = Client("http://foo@sentry.io/123")
+    assert client.transport._pool.proxy is None
+
+
+def test_proxy_none_httpenv_select(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    client = Client("http://foo@sentry.io/123")
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_none_httpsenv_select(monkeypatch):
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("https://foo@sentry.io/123")
+    assert client.transport._pool.proxy.scheme == "https"
+
+
+def test_proxy_none_httpenv_fallback(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    client = Client("https://foo@sentry.io/123")
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_bothselect_bothen(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("https://foo@sentry.io/123", http_proxy="", https_proxy="")
+    assert client.transport._pool.proxy is None
+
+
+def test_proxy_bothavoid_bothenv(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy=None)
+    assert client.transport._pool.proxy.scheme == "https"
+
+
+def test_proxy_bothselect_httpenv(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy=None)
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_httpselect_bothenv(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy="")
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_proxy_httpsselect_bothenv(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("https://foo@sentry.io/123", http_proxy="", https_proxy=None)
+    assert client.transport._pool.proxy.scheme == "https"
+
+
+def test_proxy_httpselect_httpsenv(monkeypatch):
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy="")
+    assert client.transport._pool.proxy is None
+
+
+def test_proxy_httpsselect_bothenv_http(monkeypatch):
+    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
+    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
+    client = Client("http://foo@sentry.io/123", http_proxy=None, https_proxy=None)
+    assert client.transport._pool.proxy.scheme == "http"
+
+
+def test_simple_transport():
+    events = []
+    with Hub(Client(transport=events.append)):
+        capture_message("Hello World!")
+    assert events[0]["message"] == "Hello World!"
+
+
+def test_ignore_errors():
+    class MyDivisionError(ZeroDivisionError):
+        pass
+
+    def raise_it(exc_info):
+        reraise(*exc_info)
+
+    hub = Hub(Client(ignore_errors=[ZeroDivisionError], transport=_TestTransport()))
+    hub._capture_internal_exception = raise_it
+
+    def e(exc):
+        try:
+            raise exc
+        except Exception:
+            hub.capture_exception()
+
+    e(ZeroDivisionError())
+    e(MyDivisionError())
+    pytest.raises(EventCaptured, lambda: e(ValueError()))
+
+
+def test_with_locals_enabled():
+    events = []
+    hub = Hub(Client(with_locals=True, transport=events.append))
+    try:
+        1 / 0
+    except Exception:
+        hub.capture_exception()
+
+    event, = events
+
+    assert all(
+        frame["vars"]
+        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
+    )
+
+
+def test_with_locals_disabled():
+    events = []
+    hub = Hub(Client(with_locals=False, transport=events.append))
+    try:
+        1 / 0
+    except Exception:
+        hub.capture_exception()
+
+    event, = events
+
+    assert all(
+        "vars" not in frame
+        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
+    )
+
+
+def test_attach_stacktrace_enabled():
+    events = []
+    hub = Hub(Client(attach_stacktrace=True, transport=events.append))
+
+    def foo():
+        bar()
+
+    def bar():
+        hub.capture_message("HI")
+
+    foo()
+
+    event, = events
+    thread, = event["threads"]["values"]
+    functions = [x["function"] for x in thread["stacktrace"]["frames"]]
+    assert functions[-2:] == ["foo", "bar"]
+
+
+def test_attach_stacktrace_enabled_no_locals():
+    events = []
+    hub = Hub(
+        Client(attach_stacktrace=True, with_locals=False, transport=events.append)
+    )
+
+    def foo():
+        bar()
+
+    def bar():
+        hub.capture_message("HI")
+
+    foo()
+
+    event, = events
+    thread, = event["threads"]["values"]
+    local_vars = [x.get("vars") for x in thread["stacktrace"]["frames"]]
+    assert local_vars[-2:] == [None, None]
+
+
+def test_attach_stacktrace_in_app(sentry_init, capture_events):
+    sentry_init(attach_stacktrace=True, in_app_exclude=["_pytest"])
+    events = capture_events()
+
+    capture_message("hi")
+
+    event, = events
+    thread, = event["threads"]["values"]
+    frames = thread["stacktrace"]["frames"]
+    pytest_frames = [f for f in frames if f["module"].startswith("_pytest")]
+    assert pytest_frames
+    assert all(f["in_app"] is False for f in pytest_frames)
+    assert any(f["in_app"] for f in frames)
+
+
+def test_attach_stacktrace_disabled():
+    events = []
+    hub = Hub(Client(attach_stacktrace=False, transport=events.append))
+    hub.capture_message("HI")
+
+    event, = events
+    assert "threads" not in event
+
+
+def test_capture_event_works():
+    c = Client(transport=_TestTransport())
+    pytest.raises(EventCaptured, lambda: c.capture_event({}))
+    pytest.raises(EventCaptured, lambda: c.capture_event({}))
+
+
+@pytest.mark.parametrize("num_messages", [10, 20])
+def test_atexit(tmpdir, monkeypatch, num_messages):
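+    # The child script exits right after queueing its messages; the SDK's atexit
+    # handler must flush every pending event (each taking ~0.1s to "send")
+    # before the interpreter shuts down.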
+    app = tmpdir.join("app.py")
+    app.write(
+        dedent(
+            """
+    import time
+    from sentry_sdk import init, transport, capture_message
+
+    def send_event(self, event):
+        time.sleep(0.1)
+        print(event["message"])
+
+    transport.HttpTransport._send_event = send_event
+    init("http://foobar@localhost/123", shutdown_timeout={num_messages})
+
+    for _ in range({num_messages}):
+        capture_message("HI")
+    """.format(
+                num_messages=num_messages
+            )
+        )
+    )
+
+    start = time.time()
+    output = subprocess.check_output([sys.executable, str(app)])
+    end = time.time()
+
+    # Each message takes at least 0.1 seconds to process
+    assert int(end - start) >= num_messages / 10
+
+    assert output.count(b"HI") == num_messages
+
+
+def test_configure_scope_available(sentry_init, request, monkeypatch):
+    # Test that scope is configured if client is configured
+    sentry_init()
+
+    with configure_scope() as scope:
+        assert scope is Hub.current._stack[-1][1]
+        scope.set_tag("foo", "bar")
+
+    calls = []
+
+    def callback(scope):
+        calls.append(scope)
+        scope.set_tag("foo", "bar")
+
+    assert configure_scope(callback) is None
+    assert len(calls) == 1
+    assert calls[0] is Hub.current._stack[-1][1]
+
+
+@pytest.mark.parametrize("no_sdk", (True, False))
+def test_configure_scope_unavailable(no_sdk, monkeypatch):
+    if no_sdk:
+        # Emulate minimal without SDK installation: callbacks are not called
+        monkeypatch.setattr(HubMeta, "current", None)
+        assert not Hub.current
+    else:
+        # Still, no client configured
+        assert Hub.current
+
+    calls = []
+
+    def callback(scope):
+        calls.append(scope)
+        scope.set_tag("foo", "bar")
+
+    with configure_scope() as scope:
+        scope.set_tag("foo", "bar")
+
+    assert configure_scope(callback) is None
+    assert not calls
+
+
+@pytest.mark.tests_internal_exceptions
+def test_client_debug_option_enabled(sentry_init, caplog):
+    sentry_init(debug=True)
+
+    Hub.current._capture_internal_exception((ValueError, ValueError("OK"), None))
+    assert "OK" in caplog.text
+
+
+@pytest.mark.tests_internal_exceptions
+@pytest.mark.parametrize("with_client", (True, False))
+def test_client_debug_option_disabled(with_client, sentry_init, caplog):
+    if with_client:
+        sentry_init()
+
+    Hub.current._capture_internal_exception((ValueError, ValueError("OK"), None))
+    assert "OK" not in caplog.text
+
+
+def test_scope_initialized_before_client(sentry_init, capture_events):
+    """
+    This is a consequence of how configure_scope() works. We must
+    make `configure_scope()` a noop if no client is configured. Even
+    if the user later configures a client: We don't know that.
+    """
+    with configure_scope() as scope:
+        scope.set_tag("foo", 42)
+
+    sentry_init()
+
+    events = capture_events()
+    capture_message("hi")
+    event, = events
+
+    assert "tags" not in event
+
+
+def test_weird_chars(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+    capture_message(u"föö".encode("latin1"))
+    event, = events
+    assert json.loads(json.dumps(event)) == event
+
+
+def test_nan(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        nan = float("nan")  # noqa
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    frame, = frames
+    assert frame["vars"]["nan"] == "nan"
+
+
+def test_cyclic_frame_vars(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        a = {}
+        a["a"] = a
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+    assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
+        "a": ""
+    }
+
+
+def test_cyclic_data(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    with configure_scope() as scope:
+        data = {}
+        data["is_cyclic"] = data
+
+        other_data = ""
+        data["not_cyclic"] = other_data
+        data["not_cyclic2"] = other_data
+        scope.set_extra("foo", data)
+
+    capture_message("hi")
+    event, = events
+
+    data = event["extra"]["foo"]
+    assert data == {"not_cyclic2": "", "not_cyclic": "", "is_cyclic": ""}
+
+
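+# The three "databag" benchmarks below check that the serializer trims deeply
+# nested, very long and very wide local variables, keeping the serialized
+# event small (under 10 kB here) and serialization fast.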
+def test_databag_depth_stripping(sentry_init, capture_events, benchmark):
+    sentry_init()
+    events = capture_events()
+
+    value = ["a"]
+    for _ in range(100000):
+        value = [value]
+
+    @benchmark
+    def inner():
+        del events[:]
+        try:
+            a = value  # noqa
+            1 / 0
+        except Exception:
+            capture_exception()
+
+        event, = events
+
+        assert len(json.dumps(event)) < 10000
+
+
+def test_databag_string_stripping(sentry_init, capture_events, benchmark):
+    sentry_init()
+    events = capture_events()
+
+    @benchmark
+    def inner():
+        del events[:]
+        try:
+            a = "A" * 1000000  # noqa
+            1 / 0
+        except Exception:
+            capture_exception()
+
+        event, = events
+
+        assert len(json.dumps(event)) < 10000
+
+
+def test_databag_breadth_stripping(sentry_init, capture_events, benchmark):
+    sentry_init()
+    events = capture_events()
+
+    @benchmark
+    def inner():
+        del events[:]
+        try:
+            a = ["a"] * 1000000  # noqa
+            1 / 0
+        except Exception:
+            capture_exception()
+
+        event, = events
+
+        assert len(json.dumps(event)) < 10000
+
+
+@pytest.mark.skipif(not HAS_CHAINED_EXCEPTIONS, reason="Only works on 3.3+")
+def test_chained_exceptions(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        try:
+            raise ValueError()
+        except Exception:
+            1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+
+    e1, e2 = event["exception"]["values"]
+
+    # This is the order all other SDKs send chained exceptions in. Including
+    # Raven-Python.
+
+    assert e1["type"] == "ValueError"
+    assert e2["type"] == "ZeroDivisionError"
+
+
+@pytest.mark.tests_internal_exceptions
+def test_broken_mapping(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    class C(Mapping):
+        def broken(self, *args, **kwargs):
+            raise Exception("broken")
+
+        __getitem__ = broken
+        __setitem__ = broken
+        __delitem__ = broken
+        __iter__ = broken
+        __len__ = broken
+
+        def __repr__(self):
+            return "broken"
+
+    try:
+        a = C()  # noqa
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+    assert (
+        event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
+        == "<failed to serialize, use init(debug=True) to see error logs>"
+    )
+
+
+def test_mapping_sends_exception(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    class C(Mapping):
+        def __iter__(self):
+            try:
+                1 / 0
+            except ZeroDivisionError:
+                capture_exception()
+            yield "hi"
+
+        def __len__(self):
+            """List length"""
+            return 1
+
+        def __getitem__(self, ii):
+            """Get a list item"""
+            if ii == "hi":
+                return "hi"
+
+            raise KeyError()
+
+    try:
+        a = C()  # noqa
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+
+    assert event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"] == {
+        "hi": "'hi'"
+    }
+
+
+def test_object_sends_exception(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    class C(object):
+        def __repr__(self):
+            try:
+                1 / 0
+            except ZeroDivisionError:
+                capture_exception()
+            return "hi, i am a repr"
+
+    try:
+        a = C()  # noqa
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    event, = events
+
+    assert (
+        event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]
+        == "hi, i am a repr"
+    )
+
+
+def test_errno_errors(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    class Foo(Exception):
+        errno = 69
+
+    capture_exception(Foo())
+
+    event, = events
+
+    exception, = event["exception"]["values"]
+    assert exception["mechanism"]["meta"]["errno"]["number"] == 69
+
+
+def test_non_string_variables(sentry_init, capture_events):
+    """There is some extremely terrible code in the wild that
+    inserts non-strings as variable names into `locals()`."""
+
+    sentry_init()
+    events = capture_events()
+
+    try:
+        locals()[42] = True
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    event, = events
+
+    exception, = event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+    frame, = exception["stacktrace"]["frames"]
+    assert frame["vars"]["42"] == "True"
+
+
+def test_dict_changed_during_iteration(sentry_init, capture_events):
+    """
+    Some versions of Bottle modify the WSGI environment inside of this __repr__
+    impl: https://github.com/bottlepy/bottle/blob/0.12.16/bottle.py#L1386
+
+    See https://github.com/getsentry/sentry-python/pull/298 for discussion
+    """
+    sentry_init(send_default_pii=True)
+    events = capture_events()
+
+    class TooSmartClass(object):
+        def __init__(self, environ):
+            self.environ = environ
+
+        def __repr__(self):
+            if "my_representation" in self.environ:
+                return self.environ["my_representation"]
+
+            self.environ["my_representation"] = ""
+            return self.environ["my_representation"]
+
+    try:
+        environ = {}
+        environ["a"] = TooSmartClass(environ)
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    event, = events
+    exception, = event["exception"]["values"]
+    frame, = exception["stacktrace"]["frames"]
+    assert frame["vars"]["environ"] == {"a": ""}
+
+
+@pytest.mark.parametrize(
+    "dsn",
+    [
+        "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
+        u"http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
+    ],
+)
+def test_init_string_types(dsn, sentry_init):
+    # Allow unicode strings on Python 3 and both on Python 2 (due to
+    # unicode_literals)
+    #
+    # Supporting bytes on Python 3 is not really wrong but probably would be
+    # extra code
+    sentry_init(dsn)
+    assert (
+        Hub.current.client.dsn
+        == "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2"
+    )
diff --git a/tests/test_scope.py b/tests/test_scope.py
new file mode 100644
index 0000000..b9c3335
--- /dev/null
+++ b/tests/test_scope.py
@@ -0,0 +1,17 @@
+import copy
+from sentry_sdk.scope import Scope
+
+
+def test_copying():
+    s1 = Scope()
+    s1.fingerprint = {}
+    s1.set_tag("foo", "bar")
+
+    s2 = copy.copy(s1)
+    assert "foo" in s2._tags
+
+    s1.set_tag("bam", "baz")
+    assert "bam" in s1._tags
+    assert "bam" not in s2._tags
+
+    assert s1._fingerprint is s2._fingerprint
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
new file mode 100644
index 0000000..ce8276b
--- /dev/null
+++ b/tests/test_serializer.py
@@ -0,0 +1,28 @@
+from datetime import datetime
+
+from hypothesis import given, example
+import hypothesis.strategies as st
+
+import pytest
+
+from sentry_sdk.serializer import Serializer
+
+
+@given(
+    dt=st.datetimes(min_value=datetime(2000, 1, 1, 0, 0, 0), timezones=st.just(None))
+)
+@example(dt=datetime(2001, 1, 1, 0, 0, 0, 999500))
+def test_datetime_precision(dt, semaphore_normalize):
+    serializer = Serializer()
+
+    event = serializer.serialize_event({"timestamp": dt})
+    normalized = semaphore_normalize(event)
+
+    if normalized is None:
+        pytest.skip("no semaphore available")
+
+    dt2 = datetime.utcfromtimestamp(normalized["timestamp"])
+
+    # Float glitches can happen, and more glitches can happen
+    # because we try to work around some float glitches in semaphore
+    assert (dt - dt2).total_seconds() < 1.0
diff --git a/tests/test_tracing.py b/tests/test_tracing.py
new file mode 100644
index 0000000..0bb3e1c
--- /dev/null
+++ b/tests/test_tracing.py
@@ -0,0 +1,144 @@
+import weakref
+import gc
+
+import pytest
+
+from sentry_sdk import Hub, capture_message
+from sentry_sdk.tracing import Span
+
+
+@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
+def test_basic(sentry_init, capture_events, sample_rate):
+    sentry_init(traces_sample_rate=sample_rate)
+    events = capture_events()
+
+    with Hub.current.start_span(transaction="hi"):
+        with pytest.raises(ZeroDivisionError):
+            with Hub.current.start_span(op="foo", description="foodesc"):
+                1 / 0
+
+        with Hub.current.start_span(op="bar", description="bardesc"):
+            pass
+
+    if sample_rate:
+        event, = events
+
+        span1, span2 = event["spans"]
+        parent_span = event
+        assert span1["tags"]["status"] == "failure"
+        assert span1["op"] == "foo"
+        assert span1["description"] == "foodesc"
+        assert "status" not in span2["tags"]
+        assert span2["op"] == "bar"
+        assert span2["description"] == "bardesc"
+        assert parent_span["transaction"] == "hi"
+    else:
+        assert not events
+
+
+@pytest.mark.parametrize("sampled", [True, False, None])
+def test_continue_from_headers(sentry_init, capture_events, sampled):
+    sentry_init(traces_sample_rate=1.0, traceparent_v2=True)
+    events = capture_events()
+
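+    # The sentry-trace header has the form "<trace_id>-<span_id>-<sampled>",
+    # where the trailing sampled flag is "1", "0" or empty; the suffix
+    # assertions below rely on exactly that.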
+    with Hub.current.start_span(transaction="hi"):
+        with Hub.current.start_span() as old_span:
+            old_span.sampled = sampled
+            headers = dict(Hub.current.iter_trace_propagation_headers())
+
+    header = headers["sentry-trace"]
+    if sampled is True:
+        assert header.endswith("-1")
+    if sampled is False:
+        assert header.endswith("-0")
+    if sampled is None:
+        assert header.endswith("-")
+
+    span = Span.continue_from_headers(headers)
+    span.transaction = "WRONG"
+    assert span is not None
+    assert span.sampled == sampled
+    assert span.trace_id == old_span.trace_id
+
+    with Hub.current.start_span(span):
+        with Hub.current.configure_scope() as scope:
+            scope.transaction = "ho"
+        capture_message("hello")
+
+    if sampled is False:
+        trace1, message = events
+
+        assert trace1["transaction"] == "hi"
+    else:
+        trace1, message, trace2 = events
+
+        assert trace1["transaction"] == "hi"
+        assert trace2["transaction"] == "ho"
+
+        assert (
+            trace1["contexts"]["trace"]["trace_id"]
+            == trace2["contexts"]["trace"]["trace_id"]
+            == span.trace_id
+            == message["contexts"]["trace"]["trace_id"]
+        )
+
+    assert message["message"] == "hello"
+
+
+def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=0.5)
+
+    with Hub.current.start_span(transaction="hi") as trace:
+        assert trace.sampled is not None
+
+        with Hub.current.start_span() as span:
+            assert span.sampled == trace.sampled
+
+    with Hub.current.start_span() as span:
+        assert span.sampled is None
+
+
+@pytest.mark.parametrize(
+    "args,expected_refcount",
+    [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
+)
+def test_memory_usage(sentry_init, capture_events, args, expected_refcount):
+    sentry_init(**args)
+
+    references = weakref.WeakSet()
+
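+    # When the transaction is sampled, every span records its tags and thereby
+    # keeps the foo closure alive (all 100 weak references survive); when it is
+    # not sampled, nothing is retained and the references die off.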
+    with Hub.current.start_span(transaction="hi"):
+        for i in range(100):
+            with Hub.current.start_span(
+                op="helloworld", description="hi {}".format(i)
+            ) as span:
+
+                def foo():
+                    pass
+
+                references.add(foo)
+                span.set_tag("foo", foo)
+                pass
+
+        del foo
+        del span
+
+        # required only for pypy (cpython frees immediately)
+        gc.collect()
+
+        assert len(references) == expected_refcount
+
+
+def test_span_trimming(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
+    events = capture_events()
+
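+    # The span recorder keeps only the earliest spans; with max_spans=3 the
+    # transaction itself appears to count toward the limit, leaving room for
+    # just the two child spans asserted below.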
+    with Hub.current.start_span(transaction="hi"):
+        for i in range(10):
+            with Hub.current.start_span(op="foo{}".format(i)):
+                pass
+
+    event, = events
+    span1, span2 = event["spans"]
+    assert span1["op"] == "foo0"
+    assert span2["op"] == "foo1"
diff --git a/tests/test_transport.py b/tests/test_transport.py
new file mode 100644
index 0000000..a90aea5
--- /dev/null
+++ b/tests/test_transport.py
@@ -0,0 +1,55 @@
+# coding: utf-8
+import logging
+import pickle
+
+from datetime import datetime
+
+import pytest
+
+from sentry_sdk import Hub, Client, add_breadcrumb, capture_message
+
+
+@pytest.fixture(params=[True, False])
+def make_client(request):
+    def inner(*args, **kwargs):
+        client = Client(*args, **kwargs)
+        if request.param:
+            client = pickle.loads(pickle.dumps(client))
+
+        return client
+
+    return inner
+
+
+@pytest.mark.parametrize("debug", (True, False))
+@pytest.mark.parametrize("client_flush_method", ["close", "flush"])
+def test_transport_works(
+    httpserver,
+    request,
+    capsys,
+    caplog,
+    debug,
+    make_client,
+    client_flush_method,
+    maybe_monkeypatched_threading,
+):
+    httpserver.serve_content("ok", 200)
+
+    caplog.set_level(logging.DEBUG)
+
+    client = make_client(
+        "http://foobar@{}/123".format(httpserver.url[len("http://") :]), debug=debug
+    )
+    Hub.current.bind_client(client)
+    request.addfinalizer(lambda: Hub.current.bind_client(None))
+
+    add_breadcrumb(level="info", message="i like bread", timestamp=datetime.now())
+    capture_message("löl")
+
+    getattr(client, client_flush_method)()
+
+    out, err = capsys.readouterr()
+    assert not err and not out
+    assert httpserver.requests
+
+    assert any("Sending event" in record.msg for record in caplog.records) == debug
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py
new file mode 100644
index 0000000..df16fdc
--- /dev/null
+++ b/tests/utils/__init__.py
@@ -0,0 +1 @@
+# Make this a module for test_abs_path.
diff --git a/tests/utils/test_contextvars.py b/tests/utils/test_contextvars.py
new file mode 100644
index 0000000..62344f1
--- /dev/null
+++ b/tests/utils/test_contextvars.py
@@ -0,0 +1,49 @@
+import random
+import time
+
+
+from sentry_sdk.utils import _is_threading_local_monkey_patched
+
+
+def test_thread_local_is_patched(maybe_monkeypatched_threading):
+    if maybe_monkeypatched_threading is None:
+        assert not _is_threading_local_monkey_patched()
+    else:
+        assert _is_threading_local_monkey_patched()
+
+
+def test_leaks(maybe_monkeypatched_threading):
+    import threading
+
+    # Need to explicitly call _get_contextvars because the SDK has already
+    # decided upon gevent on import.
+
+    from sentry_sdk import utils
+
+    _, ContextVar = utils._get_contextvars()
+
+    ts = []
+
+    var = ContextVar("test_contextvar_leaks")
+
+    success = []
+
+    def run():
+        value = int(random.random() * 1000)
+        var.set(value)
+
+        for _ in range(100):
+            time.sleep(0)
+            assert var.get(None) == value
+
+        success.append(1)
+
+    for _ in range(20):
+        t = threading.Thread(target=run)
+        t.start()
+        ts.append(t)
+
+    for t in ts:
+        t.join()
+
+    assert len(success) == 20
diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py
new file mode 100644
index 0000000..3665d51
--- /dev/null
+++ b/tests/utils/test_general.py
@@ -0,0 +1,131 @@
+# coding: utf-8
+import sys
+import os
+
+import pytest
+
+from hypothesis import given
+import hypothesis.strategies as st
+
+from sentry_sdk.utils import (
+    BadDsn,
+    Dsn,
+    safe_repr,
+    exceptions_from_error_tuple,
+    filename_for_module,
+    handle_in_app_impl,
+    iter_event_stacktraces,
+)
+from sentry_sdk._compat import text_type
+
+any_string = st.one_of(st.binary(), st.text())
+
+
+@given(x=any_string)
+def test_safe_repr_never_broken_for_strings(x):
+    r = safe_repr(x)
+    assert isinstance(r, text_type)
+    assert u"broken repr" not in r
+
+
+def test_safe_repr_regressions():
+    assert u"лошадь" in safe_repr(u"лошадь")
+
+
+def test_abs_path():
+    """Check if abs_path is actually an absolute path. This can happen either
+    with eval/exec like here, or when the file in the frame is relative to
+    __main__"""
+
+    code = compile("1/0", "test.py", "exec")
+    try:
+        exec(code, {})
+    except Exception:
+        exceptions = exceptions_from_error_tuple(sys.exc_info())
+
+    exception, = exceptions
+    frame1, frame2 = frames = exception["stacktrace"]["frames"]
+
+    for frame in frames:
+        assert os.path.abspath(frame["abs_path"]) == frame["abs_path"]
+
+    assert frame1["filename"] == "tests/utils/test_general.py"
+    assert frame2["filename"] == "test.py"
+
+
+def test_filename():
+    x = filename_for_module
+
+    assert x("bogus", "bogus") == "bogus"
+
+    assert x("os", os.__file__) == "os.py"
+    assert x("pytest", pytest.__file__) == "pytest.py"
+
+    import sentry_sdk.utils
+
+    assert x("sentry_sdk.utils", sentry_sdk.utils.__file__) == "sentry_sdk/utils.py"
+
+
+@pytest.mark.parametrize(
+    "given,expected",
+    [
+        ("https://foobar@sentry.io/123", "https://sentry.io/api/123/store/"),
+        ("https://foobar@sentry.io/bam/123", "https://sentry.io/bam/api/123/store/"),
+        (
+            "https://foobar@sentry.io/bam/baz/123",
+            "https://sentry.io/bam/baz/api/123/store/",
+        ),
+    ],
+)
+def test_parse_dsn_paths(given, expected):
+    dsn = Dsn(given)
+    auth = dsn.to_auth()
+    assert auth.store_api_url == expected
+
+
+@pytest.mark.parametrize(
+    "dsn",
+    [
+        "https://foobar@sentry.io"
+        "https://foobar@sentry.io/"
+        "https://foobar@sentry.io/asdf"
+        "https://foobar@sentry.io/asdf/"
+        "https://foobar@sentry.io/asdf/123/"
+    ],
+)
+def test_parse_invalid_dsn(dsn):
+    with pytest.raises(BadDsn):
+        dsn = Dsn(dsn)
+
+
+@pytest.mark.parametrize("empty", [None, []])
+def test_in_app(empty):
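+    # in_app_include wins over in_app_exclude for the same module; when no
+    # include list is given, frames that are not excluded default to in_app.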
+    assert handle_in_app_impl(
+        [{"module": "foo"}, {"module": "bar"}],
+        in_app_include=["foo"],
+        in_app_exclude=empty,
+    ) == [{"module": "foo", "in_app": True}, {"module": "bar"}]
+
+    assert handle_in_app_impl(
+        [{"module": "foo"}, {"module": "bar"}],
+        in_app_include=["foo"],
+        in_app_exclude=["foo"],
+    ) == [{"module": "foo", "in_app": True}, {"module": "bar"}]
+
+    assert handle_in_app_impl(
+        [{"module": "foo"}, {"module": "bar"}],
+        in_app_include=empty,
+        in_app_exclude=["foo"],
+    ) == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}]
+
+
+def test_iter_stacktraces():
+    assert set(
+        iter_event_stacktraces(
+            {
+                "threads": {"values": [{"stacktrace": 1}]},
+                "stacktrace": 2,
+                "exception": {"values": [{"stacktrace": 3}]},
+            }
+        )
+    ) == {1, 2, 3}
diff --git a/tests/utils/test_transaction.py b/tests/utils/test_transaction.py
new file mode 100644
index 0000000..6548d80
--- /dev/null
+++ b/tests/utils/test_transaction.py
@@ -0,0 +1,20 @@
+from sentry_sdk.utils import transaction_from_function
+
+
+class MyClass:
+    def myfunc():
+        pass
+
+
+def myfunc():
+    pass
+
+
+def test_transaction_from_function():
+    x = transaction_from_function
+    assert x(MyClass) == "tests.utils.test_transaction.MyClass"
+    assert x(MyClass.myfunc) == "tests.utils.test_transaction.MyClass.myfunc"
+    assert x(myfunc) == "tests.utils.test_transaction.myfunc"
+    assert x(None) is None
+    assert x(42) is None
+    assert x(lambda: None).endswith("<lambda>")
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..c79a924
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,211 @@
+# Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist =
+    # === Core ===
+    py{2.7,3.4,3.5,3.6,3.7,3.8}
+    pypy
+
+
+    # === Integrations ===
+    # Formatting: 1 blank line between different integrations.
+
+    py3.7-django-{2.2,dev}
+    {py3.5,py3.6,py3.7}-django-{2.0,2.1}
+    {pypy,py2.7,py3.5}-django-1.11
+    {pypy,py2.7,py3.4,py3.5}-django-{1.8,1.9,1.10}
+    {pypy,py2.7,py3.4}-django-1.7
+    {pypy,py2.7}-django-1.6
+
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12,dev}
+
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-bottle-0.12
+
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-falcon-1.4
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-falcon-2.0
+
+    {py3.5,py3.6,py3.7}-sanic-{0.8,18}
+
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.1,4.2,4.3}
+    {pypy,py2.7}-celery-3
+
+    py2.7-beam-{12,13}
+    py3.7-beam-{12,13,master}
+
+    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
+    py3.7-aws_lambda
+
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.3,1.4,1.5,1.6,1.7,1.8,1.9,1.10}
+
+    {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
+    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-rq-{0.12,0.13,1.0,1.1}
+
+    py3.7-aiohttp-{3.5,3.6}
+
+    {py3.7,py3.8}-tornado-{5,6}
+
+    {py2.7,py3.7}-requests
+
+    {py2.7,py3.7}-redis
+
+    py3.7-asgi
+
+    {py2.7,py3.7}-sqlalchemy-{1.2,1.3}
+
+[testenv]
+deps =
+    -r test-requirements.txt
+
+    django-{1.11,2.0,2.1,2.2}: djangorestframework>=3.0.0,<4.0.0
+    py3.7-django-{1.11,2.0,2.1,2.2}: channels>2
+    py3.7-django-{1.11,2.0,2.1,2.2}: pytest-asyncio
+    {py2.7,py3.7}-django-{1.11,2.2}: psycopg2-binary
+
+    django-{1.6,1.7,1.8}: pytest-django<3.0
+    django-{1.9,1.10,1.11,2.0,2.1,2.2,dev}: pytest-django>=3.0
+
+    django-1.6: Django>=1.6,<1.7
+    django-1.7: Django>=1.7,<1.8
+    django-1.8: Django>=1.8,<1.9
+    django-1.9: Django>=1.9,<1.10
+    django-1.10: Django>=1.10,<1.11
+    django-1.11: Django>=1.11,<1.12
+    django-2.0: Django>=2.0,<2.1
+    django-2.1: Django>=2.1,<2.2
+    django-2.2: Django>=2.2,<2.3
+    django-dev: git+https://github.com/django/django.git#egg=Django
+
+    flask: flask-login
+    flask-0.11: Flask>=0.11,<0.12
+    flask-0.12: Flask>=0.12,<0.13
+    flask-1.0: Flask>=1.0,<1.1
+    flask-1.1: Flask>=1.1,<1.2
+    flask-dev: git+https://github.com/pallets/flask.git#egg=flask
+
+    bottle-0.12: bottle>=0.12,<0.13
+    bottle-dev: git+https://github.com/bottlepy/bottle#egg=bottle
+
+    falcon-1.4: falcon>=1.4,<1.5
+    falcon-2.0: falcon>=2.0.0rc3,<3.0
+
+    sanic-0.8: sanic>=0.8,<0.9
+    sanic-18: sanic>=18.0,<19.0
+    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
+    sanic: aiohttp
+
+    beam-12: apache-beam>=2.12.0, <2.13.0
+    beam-13: apache-beam>=2.13.0, <2.14.0
+    beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
+
+    celery-3: Celery>=3.1,<4.0
+    celery-4.1: Celery>=4.1,<4.2
+    celery-4.2: Celery>=4.2,<4.3
+    celery-4.3: Celery>=4.3,<4.4
+
+    requests: requests>=2.0
+
+    aws_lambda: boto3
+
+    pyramid-1.3: pyramid>=1.3,<1.4
+    pyramid-1.4: pyramid>=1.4,<1.5
+    pyramid-1.5: pyramid>=1.5,<1.6
+    pyramid-1.6: pyramid>=1.6,<1.7
+    pyramid-1.7: pyramid>=1.7,<1.8
+    pyramid-1.8: pyramid>=1.8,<1.9
+    pyramid-1.9: pyramid>=1.9,<1.10
+    pyramid-1.10: pyramid>=1.10,<1.11
+
+    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
+    rq-{0.13,1.0,1.1}: fakeredis>=1.0
+    # https://github.com/jamesls/fakeredis/issues/245
+    rq: redis<3.2.2
+
+    rq-0.6: rq>=0.6,<0.7
+    rq-0.7: rq>=0.7,<0.8
+    rq-0.8: rq>=0.8,<0.9
+    rq-0.9: rq>=0.9,<0.10
+    rq-0.10: rq>=0.10,<0.11
+    rq-0.11: rq>=0.11,<0.12
+    rq-0.12: rq>=0.12,<0.13
+    rq-0.13: rq>=0.13,<0.14
+    rq-1.0: rq>=1.0,<1.1
+    rq-1.1: rq>=1.1,<1.2
+
+    aiohttp-3.4: aiohttp>=3.4.0,<3.5.0
+    aiohttp-3.5: aiohttp>=3.5.0,<3.6.0
+    aiohttp: pytest-aiohttp
+
+    tornado-5: tornado>=5,<6
+    tornado-6: tornado>=6.0a1
+
+    redis: fakeredis
+    # https://github.com/jamesls/fakeredis/issues/245
+    redis: redis<3.2.2
+
+    asgi: starlette
+    asgi: requests
+
+    sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
+    sqlalchemy-1.3: sqlalchemy>=1.3,<1.4
+
+    linters: black
+    linters: flake8
+    linters: flake8-import-order
+    linters: mypy>=0.720
+
+    # https://github.com/PyCQA/flake8-bugbear/pull/77
+    linters: git+https://github.com/untitaker/flake8-bugbear#branch=fix/b901-yield-expr
+setenv =
+    PYTHONDONTWRITEBYTECODE=1
+    TESTPATH=tests
+    beam: TESTPATH=tests/integrations/beam
+    django: TESTPATH=tests/integrations/django
+    flask: TESTPATH=tests/integrations/flask
+    bottle: TESTPATH=tests/integrations/bottle
+    falcon: TESTPATH=tests/integrations/falcon
+    celery: TESTPATH=tests/integrations/celery
+    requests: TESTPATH=tests/integrations/requests
+    aws_lambda: TESTPATH=tests/integrations/aws_lambda
+    sanic: TESTPATH=tests/integrations/sanic
+    pyramid: TESTPATH=tests/integrations/pyramid
+    rq: TESTPATH=tests/integrations/rq
+    aiohttp: TESTPATH=tests/integrations/aiohttp
+    tornado: TESTPATH=tests/integrations/tornado
+    redis: TESTPATH=tests/integrations/redis
+    asgi: TESTPATH=tests/integrations/asgi
+    sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
+
+    COVERAGE_FILE=.coverage-{envname}
+passenv =
+    SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID
+    SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
+    SENTRY_PYTHON_TEST_AWS_IAM_ROLE
+    SENTRY_PYTHON_TEST_POSTGRES_USER
+    SENTRY_PYTHON_TEST_POSTGRES_NAME
+usedevelop = True
+extras =
+    flask: flask
+    bottle: bottle
+    falcon: falcon
+
+basepython =
+    py2.7: python2.7
+    py3.4: python3.4
+    py3.5: python3.5
+    py3.6: python3.6
+    py3.7: python3.7
+    py3.8: python3.8
+    linters: python3
+    pypy: pypy
+
+commands =
+    py.test {env:TESTPATH} {posargs}
+
+[testenv:linters]
+commands =
+    flake8 tests examples sentry_sdk
+    black --check tests examples sentry_sdk
+    mypy examples sentry_sdk