Merge branch 'optim-wip' into optim-wip-clip-vis

ProGamerGov authored May 13, 2022
2 parents 34bedcf + e2ba3df commit e9598ea
Showing 316 changed files with 37,924 additions and 5,375 deletions.
123 changes: 73 additions & 50 deletions .circleci/config.yml
@@ -31,19 +31,12 @@ commands:
name: "Lint with flake8"
command: flake8

lint_black:
description: "Lint with black"
ufmt_check:
description: "Check formatting with ufmt"
steps:
- run:
name: "Lint with black"
command: black --check --diff .

isort:
description: "Check import order with isort"
steps:
- run:
name: "Check import order with isort"
command: isort --check-only -v .
name: "Check formatting with ufmt"
command: ufmt check .

mypy_check:
description: "Static type checking with mypy"
@@ -86,7 +79,7 @@ commands:
name: "Deploy website to GitHub Pages"
# TODO: make the installation above conditional on there being relevant changes (no need to install if there are none)
command: |
if ! git diff --name-only HEAD^ | grep -E "(^\.circleci\/.*)|(^docs\/.*)|(^website\/.*)|(^scripts\/.*)|(^sphinx\/.*)|(^tutorials\/.*)"; then
if ! git diff --name-only HEAD^ | grep -E "(^captum\/.*)|(^\.circleci\/.*)|(^docs\/.*)|(^website\/.*)|(^scripts\/.*)|(^sphinx\/.*)|(^tutorials\/.*)"; then
echo "Skipping deploy. No relevant website files have changed"
elif [[ $CIRCLE_PROJECT_USERNAME == "pytorch" && -z $CI_PULL_REQUEST && -z $CIRCLE_PR_USERNAME ]]; then
mkdir -p website/static/.circleci && cp -a .circleci/. website/static/.circleci/.
@@ -113,6 +106,7 @@ commands:
name: "Switch to Python v3.7"
command: |
pyenv versions
pyenv install 3.7.0
pyenv global 3.7.0
install_cuda:
@@ -121,76 +115,96 @@
- run:
name: "Install CUDA"
command: |
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/cuda-ubuntu1604.pin
sudo mv cuda-ubuntu1604.pin /etc/apt/preferences.d/cuda-repository-pin-600
wget http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda-repo-ubuntu1604-10-2-local-10.2.89-440.33.01_1.0-1_amd64.deb
sudo dpkg -i cuda-repo-ubuntu1604-10-2-local-10.2.89-440.33.01_1.0-1_amd64.deb
sudo apt-key add /var/cuda-repo-10-2-local-10.2.89-440.33.01/7fa2af80.pub
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-ubuntu2004.pin
sudo mv cuda-ubuntu2004.pin /etc/apt/preferences.d/cuda-repository-pin-600
wget https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda-repo-ubuntu2004-11-4-local_11.4.2-470.57.02-1_amd64.deb
sudo dpkg -i cuda-repo-ubuntu2004-11-4-local_11.4.2-470.57.02-1_amd64.deb
sudo apt-key add /var/cuda-repo-ubuntu2004-11-4-local/7fa2af80.pub
sudo apt-get update
sudo apt-get --yes --force-yes install cuda
jobs:

lint_test_py36_pip:
lint_py36:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install
- lint_flake8
- ufmt_check
- sphinx

test_py36_pip:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install:
args: "-n"
- lint_flake8
- lint_black
- isort
- mypy_check
- unit_tests
- sphinx

lint_test_py36_pip_release:
test_py36_pip_release:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install
- lint_flake8
- lint_black
- isort
- mypy_check
- unit_tests
- sphinx

test_py36_pip_torch_1_3:
test_py36_pip_torch_1_6:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install:
args: "-v 1.6"
- unit_tests

test_py36_pip_torch_1_7:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install:
args: "-v 1.3"
args: "-v 1.7"
- unit_tests

test_py36_pip_torch_1_2:
test_py36_pip_torch_1_8:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install:
args: "-v 1.2"
args: "-v 1.8"
- unit_tests

lint_test_py37_conda:
test_py36_pip_torch_1_9:
docker:
- image: circleci/python:3.6.8
steps:
- checkout
- pip_install:
args: "-v 1.9"
- unit_tests


test_py37_conda:
docker:
- image: continuumio/miniconda3
steps:
- checkout
- conda_install:
args: "-n"
- lint_flake8
- isort
- unit_tests
- sphinx


test_cuda_multi_gpu:
machine:
image: ubuntu-1604:201903-01
resource_class: gpu.large
image: ubuntu-2004:202201-02
resource_class: gpu.nvidia.medium.multi
steps:
- checkout
- install_cuda
@@ -205,11 +219,6 @@ jobs:
- checkout
- pip_install:
args: "-n -d"
- lint_flake8
- lint_black
- isort
- unit_tests
- sphinx
- configure_github_bot
- deploy_site

@@ -225,24 +234,38 @@ aliases:

workflows:

lint_and_test:
lint_test_and_deploy_site:
jobs:
- lint_test_py36_pip:
- lint_py36:
filters: *exclude_ghpages_fbconfig
- lint_test_py36_pip_release:
- test_py36_pip:
filters: *exclude_ghpages_fbconfig
- lint_test_py37_conda:
- test_py36_pip_release:
filters: *exclude_ghpages_fbconfig
- test_py36_pip_torch_1_2:
- test_py37_conda:
filters: *exclude_ghpages_fbconfig
- test_py36_pip_torch_1_3:
- test_py36_pip_torch_1_6:
filters: *exclude_ghpages_fbconfig
- test_py36_pip_torch_1_7:
filters: *exclude_ghpages_fbconfig
- test_py36_pip_torch_1_8:
filters: *exclude_ghpages_fbconfig
- test_py36_pip_torch_1_9:
filters: *exclude_ghpages_fbconfig
- test_cuda_multi_gpu:
filters: *exclude_ghpages_fbconfig

auto_deploy_site:
jobs:
- auto_deploy_site:
requires:
- lint_py36
- test_py36_pip
- test_py36_pip_release
- test_py37_conda
- test_py36_pip_torch_1_6
- test_py36_pip_torch_1_7
- test_py36_pip_torch_1_8
- test_py36_pip_torch_1_9
- test_cuda_multi_gpu
filters:
branches:
only:
4 changes: 2 additions & 2 deletions .conda/meta.yaml
@@ -16,8 +16,8 @@ requirements:
- python>=3.6
run:
- numpy
- pytorch>=1.2
- matplotlib
- pytorch>=1.6
- matplotlib-base

test:
imports:
1 change: 1 addition & 0 deletions .gitattributes
@@ -1 +1,2 @@
tutorials/* linguist-documentation
*.pt binary
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/---documentation.md
@@ -9,6 +9,6 @@ assignees: ''

## 📚 Documentation

<!-- A clear and concise description of what content in https://captum.ai/docs, https://captum.ai/tutorials or https://captum.ai/api is an issue.
Feel free also to file an issue if you see any problems in the README.md or in the code documentation
-->
10 changes: 5 additions & 5 deletions .gitignore
@@ -109,8 +109,8 @@ website/pages/tutorials/*

## Generated for Sphinx
website/pages/api/
website/static/js/*
!website/static/js/mathjax.js
!website/static/js/code_block_buttons.js
website/static/_sphinx-sources/
node_modules
website/static/_sphinx/

# Insight
captum/insights/attr_vis/frontend/node_modules/
captum/insights/attr_vis/widget/static
7 changes: 0 additions & 7 deletions .isort.cfg

This file was deleted.

53 changes: 30 additions & 23 deletions CONTRIBUTING.md
@@ -1,6 +1,23 @@
# Contributing to Captum

We want to make contributing to Captum as easy and transparent as possible.
Thank you for your interest in contributing to Captum! We want to make contributing to Captum as easy and transparent as possible.
Before you begin writing code, it is important that you share your intention to contribute with the team, based on the type of contribution:


1. You want to propose and implement a new algorithm, add a new feature or fix a bug. This can be both code and documentation proposals.
1. For all non-outstanding features, bug-fixes and algorithms in the Captum issue list (https://github.com/pytorch/captum/issues) please create an issue first.
2. If the implementation requires API or any other major code changes (new files, packages or algorithms), we will likely request a design document to review and discuss the design and implementation before making changes. An example design document for LIME can be found here (https://github.com/pytorch/captum/issues/467).
3. Once we agree that the plan looks good or confirmed that the change is small enough to not require a detailed design discussion, go ahead and implement it!

2. You want to implement a feature or bug-fix for an outstanding issue.

1. Search for your issue in the Captum issue list (https://github.com/pytorch/captum/issues).
2. Pick an issue and comment that you'd like to work on the feature or bug-fix.
3. If you need more context on a particular issue, please ask and we’ll be happy to help.

Once you implement and test your feature or bug-fix, please submit a Pull Request to https://github.com/pytorch/captum.

This document covers some of the technical aspects of contributing to Captum. More details on what we are looking for in the contributions can be found in the [Contributing Guidelines](https://captum.ai/docs/contribution_guidelines).


## Development installation
@@ -18,45 +35,35 @@ pip install -e .[dev]

#### Code Style

Captum uses [black](https://github.com/ambv/black) and [flake8](https://github.com/PyCQA/flake8) to
enforce a common code style across the code base. black and flake8 are installed easily via
pip using `pip install black flake8`, and run locally by calling
Captum uses [ufmt](https://pypi.org/project/ufmt/) and [flake8](https://github.com/PyCQA/flake8) to
enforce a common code style across the code base. ufmt and flake8 are installed easily via
pip using `pip install ufmt flake8`, and run locally by calling
```bash
black .
ufmt format .
flake8 .
```
from the repository root. No additional configuration should be needed (see the
[black documentation](https://black.readthedocs.io/en/stable/installation_and_usage.html#usage)
for advanced usage).

Captum also uses [isort](https://github.com/timothycrosley/isort) to sort imports
alphabetically and separate into sections. isort is installed easily via
pip using `pip install isort`, and run locally by calling
```bash
isort
```
from the repository root. Configuration for isort is located in .isort.cfg.
from the repository root.

We feel strongly that having a consistent code style is extremely important, so
CircleCI will fail on your PR if it does not adhere to the black or flake8 formatting style or isort import ordering.
CircleCI will fail on your PR if it does not adhere to the ufmt or flake8 formatting style.


#### Type Hints

Captum is fully typed using python 3.6+
[type hints](https://www.python.org/dev/peps/pep-0484/).
We expect any contributions to also use proper type annotations, and we enforce
consistency of these in our continuous integration tests.

To type check your code locally, install [mypy](https://github.com/python/mypy),
which can be done with pip using `pip install "mypy>=0.760"`
Then run this script from the repository root:
```bash
./scripts/run_mypy.sh
```
Note that we expect mypy to have version 0.760 or higher, and when type checking, use PyTorch 1.4 or
higher due to fixes to PyTorch type hints available in 1.4. We also use the Literal feature which is
available only in Python 3.8 or above. If type-checking using a previous version of Python, you will
need to install the typing-extension package which can be done with pip using `pip install typing-extensions`.
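
As a rough illustration (hypothetical, not taken from the Captum codebase), a Literal-annotated function that type checks on older Pythons via the typing-extensions backport might look like:
```python
# Hypothetical sketch: Literal restricts `mode` to the listed string values,
# so mypy rejects calls such as reduce_scores(x, mode="max").
try:
    from typing import Literal  # Python 3.8+
except ImportError:
    from typing_extensions import Literal  # backport for older Pythons

import torch


def reduce_scores(scores: torch.Tensor, mode: Literal["sum", "mean"] = "sum") -> torch.Tensor:
    # Reduce a tensor of attribution scores with the requested reduction.
    if mode == "sum":
        return scores.sum()
    return scores.mean()
```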

#### Unit Tests