Skip to content

Commit

Permalink
Merge pull request #300 from BCG-Gamma/dev/2.0.dev0
Browse files · Browse the repository at this point in the history
BUILD: release facet 2.0.dev0
  • Loading branch information
j-ittner authored Sep 16, 2021
2 parents ff0c006 + c2f960b commit 819f0ad
Show file tree
Hide file tree
Showing 10 changed files with 50 additions and 116 deletions.
25 changes: 13 additions & 12 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
trigger:
- 1.2.x
- 2.0.x
- release/*

pr:
- 1.2.x
- 2.0.x
- release/*

# set the build name
Expand All @@ -23,15 +23,15 @@ resources:
type: github
endpoint: BCG-Gamma
name: BCG-Gamma/pytools
ref: 1.2.x
ref: 2.0.x

variables:
${{ if not(startsWith(variables['Build.SourceBranch'], 'refs/pull/')) }}:
branchName: $[ replace(variables['Build.SourceBranch'], 'refs/heads/', '') ]
${{ if startsWith(variables['Build.SourceBranch'], 'refs/pull/') }}:
branchName: $[ replace(variables['System.PullRequest.SourceBranch'], 'refs/heads/', '') ]
source_is_release_branch: $[ startsWith(variables['branchName'], 'release') ]
source_is_develop_branch: $[ or(startsWith(variables['branchName'], 'develop'), startsWith(variables['branchName'], 'dev/')) ]
source_is_release_branch: $[ startsWith(variables['branchName'], 'release/') ]
source_is_develop_branch: $[ startsWith(variables['branchName'], 'dev/') ]
is_scheduled: $[ eq(variables['Build.Reason'], 'Schedule') ]
project_name: facet
project_root: $(project_name)
Expand Down Expand Up @@ -97,7 +97,7 @@ stages:
cd $(System.DefaultWorkingDirectory)
files_changed=$(git diff $(Build.SourceVersion)^! --name-only)
echo "Files changed since last commit: ${files_changed}"
n_files_changed=$(git diff $(Build.SourceVersion)^! --name-only | grep -i -E 'meta.yaml|pyproject.toml|azure-pipelines.yml|tox.ini' | wc -l | xargs)
n_files_changed=$(git diff $(Build.SourceVersion)^! --name-only | grep -i -E 'meta\.yaml|pyproject\.toml|azure-pipelines\.yml|tox\.ini|make\.py' | wc -l | xargs)
if [ ${n_files_changed} -gt 0 ]
then
build_changed=1
Expand Down Expand Up @@ -210,7 +210,7 @@ stages:
- script: dir $(Build.SourcesDirectory)

- script: |
conda install -y -c anaconda conda-build~=3.20.5 conda-verify toml=0.10.* flit=3.0.*
conda install -y -c anaconda conda-build~=3.20.5 conda-verify toml=0.10.* flit=3.0.* packaging~=20.9
displayName: 'Install conda-build, flit, toml'
condition: eq(variables['BUILD_SYSTEM'], 'conda')
Expand Down Expand Up @@ -297,7 +297,7 @@ stages:
- script: dir $(Build.SourcesDirectory)

- script: |
conda install -y -c anaconda conda-build~=3.20.5 conda-verify toml=0.10.* flit=3.0.*
conda install -y -c anaconda conda-build~=3.20.5 conda-verify toml=0.10.* flit=3.0.* packaging~=20.9
displayName: 'Install conda-build, flit, toml'
condition: eq(variables['BUILD_SYSTEM'], 'conda')
Expand Down Expand Up @@ -398,7 +398,7 @@ stages:
condition: ne(variables.branchName, 'develop')
script: |
set -eux
python -m pip install "toml==0.10.*"
python -m pip install toml~=0.10.2 packaging~=20.9
cd $(System.DefaultWorkingDirectory)/pytools
python <<EOF
from os import environ
Expand Down Expand Up @@ -461,13 +461,14 @@ stages:
script: |
set -eux
echo "Getting version"
pip install packaging
pip install packaging~=20.9
cd $(System.DefaultWorkingDirectory)/$(project_root)/src
export PYTHONPATH=$(System.DefaultWorkingDirectory)/pytools/sphinx/base
version=$(python -c "import make_base; print(make_base.get_package_version())")
echo "Current version: $version"
echo "Detecting pre-release ('rc' in version)"
echo "Detecting pre-release ('dev' or 'rc' in version)"
prerelease=False
[[ $version == *dev* ]] && prerelease=True && echo "Development release identified"
[[ $version == *rc* ]] && prerelease=True && echo "Pre-release identified"
echo "##vso[task.setvariable variable=current_version]$version"
echo "##vso[task.setvariable variable=is_prerelease]$prerelease"
Expand Down Expand Up @@ -541,7 +542,7 @@ stages:
conda install -c conda-forge -c bcg_gamma $(package_name)
isDraft: false
isPreRelease: $(is_prerelease)
isPrerelease: $(is_prerelease)
assets: |
$(System.ArtifactsDirectory)/tox_default/tox/$(package_name)-*.tar.gz
$(System.ArtifactsDirectory)/conda_default/conda/noarch/$(package_name)-*.tar.bz2
Expand Down
4 changes: 2 additions & 2 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ channels:
dependencies:
# run
- boruta_py ~= 0.3
- gamma-pytools ~= 1.2, >= 1.2.1
- gamma-pytools >= 2dev0, < 3a
- joblib ~= 1.0
- lightgbm ~= 3.2
- matplotlib ~= 3.3
Expand All @@ -14,7 +14,7 @@ dependencies:
- python ~= 3.8
- scikit-learn ~= 0.24.2
- scipy ~= 1.5
- sklearndf ~= 1.2
- sklearndf >= 2dev0, < 3a
# build/test
- black = 20.8b1
- conda-build ~= 3.20
Expand Down
12 changes: 6 additions & 6 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,14 @@ license = "Apache Software License v2.0"

requires = [
# direct requirements of gamma-facet
"gamma-pytools ~=1.2,>=1.2.1",
"gamma-pytools ~=2.0dev0",
"matplotlib ~=3.0",
"numpy >=1.17,<2a",
"packaging >=20",
"pandas >=0.24,<2a",
"scipy ~=1.2",
"shap >=0.34,<0.40a",
"sklearndf ~=1.2",
"sklearndf ~=2.0dev0",
# additional requirements of shap 0.38
"ipython >=7",
]
Expand Down Expand Up @@ -71,15 +71,15 @@ Repository = "https://github.com/BCG-Gamma/facet"

[build.matrix.min]
# direct requirements of gamma-facet
gamma-pytools = "~=1.2.1"
gamma-pytools = "~=2.0.dev0"
matplotlib = "~=3.0.3"
numpy = ">=1.17.5,<18a"
packaging = "~=20.9"
pandas = "~=0.24.2"
python = "~=3.6.13"
scipy = "~=1.2.1"
shap = "~=0.34.0"
sklearndf = "~=1.2.0"
sklearndf = "~=2.0.dev0"
# additional minimum requirements of sklearndf
boruta = "~=0.3.0"
lightgbm = "~=3.0.0"
Expand All @@ -92,15 +92,15 @@ ipython = "~=7.0"

[build.matrix.max]
# direct requirements of gamma-facet
gamma-pytools = "~=1.2,>=1.2.1"
gamma-pytools = ">=2dev0,<3a"
matplotlib = "~=3.4"
numpy = ">=1.20,<2a"
packaging = ">=20.9"
pandas = "~=1.2"
python = "~=3.8"
scipy = "~=1.5"
shap = "~=0.39.0"
sklearndf = "~=1.2"
sklearndf = ">=2dev0,<3a"
# additional maximum requirements of sklearndf
boruta = "~=0.3"
lightgbm = "~=3.2"
Expand Down
2 changes: 1 addition & 1 deletion src/facet/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"""


__version__ = "1.2.0"
__version__ = "2.0.dev0"

__logo__ = (
r"""
Expand Down
2 changes: 1 addition & 1 deletion src/facet/crossfit/_crossfit.py
Original file line number Diff line number Diff line change
Expand Up @@ -480,7 +480,7 @@ def on_run(self) -> None:
if do_fit:
crossfit._reset_fit()

def collate(self, job_results: List[FitResult]) -> Optional[np.ndarray]:
def aggregate(self, job_results: List[FitResult]) -> Optional[np.ndarray]:
models, scores = zip(*job_results)

if do_fit:
Expand Down
32 changes: 15 additions & 17 deletions src/facet/inspection/_shap.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,23 +275,21 @@ def _make_explainer(_model: T_LearnerPipelineDF) -> BaseExplainer:

else:
shap_df_per_split = JobRunner.from_parallelizable(self).run_jobs(
*(
Job.delayed(self._get_shap_for_split)(
model,
sample,
_make_explainer(model),
self.feature_index_,
self._convert_raw_shap_to_df,
self.get_multi_output_type(),
self._get_multi_output_names(model=model, sample=sample),
)
for model, sample in zip(
crossfit.models(),
(
sample.subsample(iloc=oob_split)
for _, oob_split in crossfit.splits()
),
)
Job.delayed(self._get_shap_for_split)(
model,
sample,
_make_explainer(model),
self.feature_index_,
self._convert_raw_shap_to_df,
self.get_multi_output_type(),
self._get_multi_output_names(model=model, sample=sample),
)
for model, sample in zip(
crossfit.models(),
(
sample.subsample(iloc=oob_split)
for _, oob_split in crossfit.splits()
),
)
)

Expand Down
2 changes: 1 addition & 1 deletion src/facet/selection/_selection.py
Original file line number Diff line number Diff line change
Expand Up @@ -518,7 +518,7 @@ def _rank_learners(
)

pipeline_scorings: List[np.ndarray] = list(
JobRunner.from_parallelizable(self).run_queues(*queues)
JobRunner.from_parallelizable(self).run_queues(queues)
)

for crossfit, pipeline_parameters, pipeline_scoring in zip(
Expand Down
24 changes: 10 additions & 14 deletions src/facet/simulation/_simulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -383,12 +383,10 @@ def simulate_actuals(self) -> pd.Series:
y_mean = self.expected_output()

result: List[float] = JobRunner.from_parallelizable(self).run_jobs(
*(
Job.delayed(self._simulate_actuals)(
model, subsample.features, y_mean, self._simulate
)
for model, subsample in self._get_simulations()
Job.delayed(self._simulate_actuals)(
model, subsample.features, y_mean, self._simulate
)
for model, subsample in self._get_simulations()
)

return pd.Series(
Expand Down Expand Up @@ -455,16 +453,14 @@ def _simulate_feature_with_values(
simulation_means_and_sems_per_split: List[
Tuple[Sequence[float], Sequence[float]]
] = JobRunner.from_parallelizable(self).run_jobs(
*(
Job.delayed(UnivariateUpliftSimulator._simulate_values_for_split)(
model=model,
subsample=subsample,
feature_name=feature_name,
simulated_values=simulation_values,
simulate_fn=self._simulate,
)
for (model, subsample) in self._get_simulations()
Job.delayed(UnivariateUpliftSimulator._simulate_values_for_split)(
model=model,
subsample=subsample,
feature_name=feature_name,
simulated_values=simulation_values,
simulate_fn=self._simulate,
)
for (model, subsample) in self._get_simulations()
)

index_name: str
Expand Down
2 changes: 1 addition & 1 deletion src/facet/simulation/viz/_style.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def _make_sub_axes() -> Axes:

def _x_axis_height() -> float:
_, axis_below_size_pixels = main_ax.get_xaxis().get_text_heights(
self.renderer
self.get_renderer()
)
((_, y0), (_, y1)) = main_ax.transData.inverted().transform(
((0, 0), (0, axis_below_size_pixels))
Expand Down
61 changes: 0 additions & 61 deletions test/test/test_package_version.py

This file was deleted.

0 comments on commit 819f0ad

Please sign in to comment.