-
Notifications
You must be signed in to change notification settings - Fork 0
/
pertubate.py
98 lines (62 loc) · 2.58 KB
/
pertubate.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
from sklearn.preprocessing import OneHotEncoder
import numpy as np
import pandas as pd
import warnings
from NN_Classifier import *
from NN_Regressor import *
from CounterfactualSurrogateModel import *
# pandas emits fragmentation warnings during feature-wise column inserts;
# silence only that specific message.
warnings.filterwarnings(
    "ignore", message="DataFrame is highly fragmented")

# ----------------------------------------------------------------------
# Dry Bean dataset: all features are continuous, none categorical.
dryBean_category_features = []
dryBean_continous_features = [
    'Area', 'Perimeter', 'MajorAxisLength', 'MinorAxisLength',
    'AspectRation', 'Eccentricity', 'ConvexArea', 'EquivDiameter',
    'Extent', 'Solidity', 'roundness', 'Compactness',
    'ShapeFactor1', 'ShapeFactor2', 'ShapeFactor3', 'ShapeFactor4',
]

dryBean = NN_Classifier(
    'dry-bean',
    hidden_layer_sizes=(16, 50, 50, 50, 50, 50, 50, 50, 50, 1000),
    categorical_features=dryBean_category_features,
    continous_features=dryBean_continous_features,
)
dryBean.load_data()
dryBean.split_data()
# dryBean.train()  # training skipped: a previously saved model is loaded
dryBean.loadModel()
dryBean.evaluate()

# Anchor instance used for perturbation experiments.
example = dryBean.X.iloc[[66]]
# localisedData = dryBean.getLocalisedData(example, 0.2)
# localisedClasses = dryBean.getLocalisedData(example, 0.3)
print(dryBean.X.shape)
# samples = dryBean.getGlobalRandomSample(10)
example = dryBean.X.iloc[[66]]
# Earlier experiment (kept for reference): min-max scale the anchor,
# perturb it 1000 times, then map the perturbations back to data space.
# # example = s.transform(example)
# # print(dryBean.clf['preprocessor'].transformers[1])
# scaler = MinMaxScaler()
# scaler.fit(dryBean.X[dryBean_continous_features])
# # print(example)
# example[dryBean_continous_features] = scaler.transform(
#     example[dryBean_continous_features])
# examples = generate_perturbations(example, 1000, 0.5)
# examples[dryBean_continous_features] = scaler.inverse_transform(
#     examples[dryBean_continous_features])
# print(examples)
# ----------------------------------------------------------------------
# Adult (census income) dataset: mixed categorical and continuous features.
category_features = [
    'workclass', 'education', 'marital-status', 'occupation',
    'relationship', 'race', 'sex', 'native-country',
]
continous_features = [
    'age', 'fnlwgt', 'capital-gain', 'capital-loss',
    'hours-per-week', 'education-num',
]
date_features = []

adult = NN_Classifier(
    'adult',
    hidden_layer_sizes=(100, 100, 100, 100),
    categorical_features=category_features,
    continous_features=continous_features,
)
adult.load_data()
adult.split_data()
# adult.train()  # training skipped: a previously saved model is loaded
adult.loadModel()
adult.evaluate()
print(adult.X.shape)
def getLocalisedData(self, index, n_samples=1000, radius=1.5, globalSample=50000):
    """Draw a localised random sample of a classifier's data around one row.

    Parameters:
        self: a classifier-like object exposing ``X`` (a DataFrame),
            ``getGlobalRandomSample(data, n)`` and a
            ``getLocalisedData(samples, example, radius=...)`` helper.
        index: positional index into ``self.X`` of the anchor instance.
        n_samples: number of rows drawn from the localised neighbourhood.
        radius: neighbourhood radius forwarded to the localisation helper.
        globalSample: size of the initial global random sample of ``self.X``.

    Returns whatever ``self.getGlobalRandomSample`` returns for the
    localised neighbourhood.
    """
    # Fix: sample from this object's own data (self.X). The original
    # hard-coded the module-level `adult.X`, so the function ignored
    # `self` and only ever worked for the adult classifier.
    samples = self.getGlobalRandomSample(self.X, globalSample)
    example = self.X.iloc[[index]]
    # NOTE(review): if this function is ever attached to the classifier
    # under the same name, the call below would recurse into itself with
    # mismatched arguments — it appears to target a distinct
    # (samples, example, radius) helper on the classifier; confirm.
    local = self.getLocalisedData(samples, example, radius=radius)
    return self.getGlobalRandomSample(local, n_samples)