-
Notifications
You must be signed in to change notification settings - Fork 12
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
6 changed files
with
513 additions
and
25 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
import os | ||
import pandas as pd | ||
import numpy as np | ||
from matplotlib import pyplot as plt | ||
from scipy import stats | ||
import seaborn as sns | ||
import pickle | ||
|
||
# --- Paths to cached per-subject performance results and output figures ---
PATH_PER = "/Users/Timon/Library/CloudStorage/OneDrive-Charité-UniversitätsmedizinBerlin/Shared Documents - ICN Data World/General/Data/UCSF_OLARU/out_per"
PATH_PER = os.path.join(PATH_PER, "LOHO_ALL_LABELS_ALL_GROUPS_exludehour_False.pkl")

PATH_FIGURES = "/Users/Timon/Library/CloudStorage/OneDrive-Charité-UniversitätsmedizinBerlin/Shared Documents - ICN Data World/General/Data/UCSF_OLARU/figures_ucsf"

# NOTE(review): pickle.load on a locally produced results file — never point
# this at untrusted data.
with open(PATH_PER, "rb") as f:
    d_out = pickle.load(f)

# Load merged, normalized features; first column of the CSV is the index.
PATH_FEATURES = "/Users/Timon/Library/CloudStorage/OneDrive-Charité-UniversitätsmedizinBerlin/Shared Documents - ICN Data World/General/Data/UCSF_OLARU/features/merged_normalized_10s_window_length/480"
df_all = pd.read_csv(os.path.join(PATH_FEATURES, "all_merged_normed.csv"), index_col=0)
# Map +/-inf to NaN first, then drop every column containing NaN.
# (The original also dropped NaN columns *before* the inf replacement, but any
# column removed by that first pass still contains NaN at the second pass, so
# the first dropna was redundant and is removed here.)
df_all = df_all.replace([np.inf, -np.inf], np.nan)
df_all = df_all.dropna(axis=1)
df_all = df_all.drop(columns=["sub"])
df_all["pkg_dt"] = pd.to_datetime(df_all["pkg_dt"])
df_all["hour"] = df_all["pkg_dt"].dt.hour

# Remove label/metadata columns that start with "pkg"; the remaining columns
# are assumed to align 1:1 with the stored feature_importances vectors —
# TODO confirm against the training pipeline.
df_all = df_all[[c for c in df_all.columns if not c.startswith("pkg")]]
columns_ = df_all.columns

plt.figure(figsize=(10, 10))
cols_show = 50  # number of top-ranked features displayed per label
for idx_, pkg_decode_label in enumerate(["pkg_dk", "pkg_bk", "pkg_tremor"]):
    # pkg_bk was decoded as a regression problem; the other labels as
    # classification — the results dict is keyed by that boolean.
    CLASS_ = pkg_decode_label != "pkg_bk"
    d_out_ = d_out[CLASS_][pkg_decode_label]["ecog_stn"]
    data = [d_out_[sub]["feature_importances"] for sub in d_out_]
    mean_fimp = np.array(data).mean(axis=0)
    # Hoist the descending sort order: previously computed twice per label.
    order = np.argsort(mean_fimp)[::-1]
    cols_sorted = np.array(columns_)[order]

    plt.subplot(3, 1, idx_ + 1)
    plt.barh(cols_sorted[:cols_show], mean_fimp[order][:cols_show])
    plt.gca().invert_yaxis()  # largest importance at the top of the bar chart
    plt.title(pkg_decode_label)
    plt.xlabel("Feature importance - Prediction Value Change")

plt.tight_layout()
plt.savefig(os.path.join(PATH_FIGURES, "feature_importance_plt_bar.pdf"))
plt.show(block=True)
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
import pandas as pd | ||
import numpy as np | ||
from matplotlib import pyplot as plt | ||
import matplotlib as mpl | ||
import pickle | ||
import os | ||
import seaborn as sns | ||
|
||
|
||
def read_per(d_out):
    """Flatten the nested LOHO performance dict into a tidy DataFrame.

    Parameters
    ----------
    d_out : dict
        Nested as d_out[classification_flag][pkg_label]["ecog_stn"][sub],
        where each leaf dict holds at least the keys "ba" and "corr_coeff".

    Returns
    -------
    pd.DataFrame
        One row per (classification_flag, pkg_label, sub) with columns
        "sub", "pkg_label", "CLASSIFICATION", and "per" — the latter holding
        balanced accuracy for classification runs, else the correlation
        coefficient.
    """
    records = []
    for classification, label_dict in d_out.items():
        # Classification runs store balanced accuracy; regression runs a
        # correlation coefficient.
        metric_key = "ba" if classification is True else "corr_coeff"
        for pkg_label, group_dict in label_dict.items():
            for sub, sub_result in group_dict["ecog_stn"].items():
                records.append({
                    "sub": sub,
                    "pkg_label": pkg_label,
                    "CLASSIFICATION": classification,
                    "per": sub_result[metric_key],
                })
    return pd.DataFrame(records)
|
||
PATH_PER = "/Users/Timon/Library/CloudStorage/OneDrive-Charité-UniversitätsmedizinBerlin/Shared Documents - ICN Data World/General/Data/UCSF_OLARU/out_per"
PATH_FIGURES = "/Users/Timon/Library/CloudStorage/OneDrive-Charité-UniversitätsmedizinBerlin/Shared Documents - ICN Data World/General/Data/UCSF_OLARU/figures_ucsf"

# Performance with/without the hour-of-day feature.
# (Filename spelling "exludehour" matches the files written upstream.)
frames = []
for exclude_hour in (True, False):
    pkl_name = f"LOHO_ALL_LABELS_ALL_GROUPS_exludehour_{exclude_hour}.pkl"
    with open(os.path.join(PATH_PER, pkl_name), "rb") as handle:
        results = pickle.load(handle)
    per_df = read_per(results)
    # Flag rows by whether the hour feature was *included* in training.
    per_df["hour_feature"] = not exclude_hour
    frames.append(per_df)
df_h = pd.concat(frames, axis=0)

# Performance with/without night-time segments.
frames = []
for exclude_night in (True, False):
    pkl_name = f"LOHO_ALL_LABELS_ALL_GROUPS_exludenight_{exclude_night}.pkl"
    with open(os.path.join(PATH_PER, pkl_name), "rb") as handle:
        results = pickle.load(handle)
    per_df = read_per(results)
    per_df["include_night"] = not exclude_night
    frames.append(per_df)
df_n = pd.concat(frames, axis=0)
|
||
def set_box_alpha(ax, alpha=0.5):
    """Set the face-color transparency of every patch on *ax* to *alpha*.

    Keeps each patch's RGB components, replacing only the alpha channel;
    mutates the patches in place and returns None.
    """
    for box in ax.patches:
        red, green, blue, _ = box.get_facecolor()
        box.set_facecolor((red, green, blue, alpha))
|
||
plt.figure(figsize=(10, 7), dpi=300)


def _panel(position, frame, hue_col, classification, ylabel):
    """Draw one box+swarm panel for the given subset into subplot *position*."""
    plt.subplot(2, 2, position)
    subset = frame[frame["CLASSIFICATION"] == classification]
    ax = sns.boxplot(
        data=subset, x="pkg_label", y="per", hue=hue_col,
        palette="viridis", showmeans=True, showfliers=False,
    )
    set_box_alpha(ax)
    sns.swarmplot(
        data=subset, x="pkg_label", y="per", hue=hue_col,
        dodge=True, palette="viridis", alpha=0.9, s=2,
    )
    plt.ylabel(ylabel)


# Top row: effect of the hour-of-day feature.
_panel(1, df_h, "hour_feature", True, "Balanced accuracy")
_panel(2, df_h, "hour_feature", False, "Correlation coefficient")
plt.tight_layout()
# NOTE(review): saved here with only two panels drawn, as in the original;
# the file name spelling "exlude" also matches the original output file.
plt.savefig(os.path.join(PATH_FIGURES, "figure_35_per_exlude_hour_feature.pdf"))
# Bottom row: effect of excluding night-time segments.
_panel(3, df_n, "include_night", True, "Balanced accuracy")
_panel(4, df_n, "include_night", False, "Correlation coefficient")
plt.tight_layout()
plt.savefig(os.path.join(PATH_FIGURES, "figure_35_per_exclude_analysis.pdf"))
plt.show(block=True)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.