MohammadMurtuza committed
Commit 70ece7b · verified · Parent(s): 694dd79

Create dataset_generator.py

Files changed (1)
  1. dataset_generator.py +139 -0
dataset_generator.py ADDED
@@ -0,0 +1,139 @@
import numpy as np


def create_highly_complex_dataset(n_samples=500000, n_features=15, noise_level=0.3, random_state=42):
    """Generate 15 differently-distributed numeric features and a highly non-linear target.

    Note: n_features is informational only; the generator always produces 15 base features.
    """
    if random_state is not None:
        np.random.seed(random_state)

    # Generate base data with different distributions
    X = np.column_stack([
        np.random.normal(0, 1, n_samples),      # Normal
        np.random.uniform(-2, 2, n_samples),    # Uniform
        np.random.exponential(1, n_samples),    # Exponential
        np.random.lognormal(0, 1, n_samples),   # Log-normal
        np.random.beta(2, 5, n_samples),        # Beta
        np.random.gamma(2, 2, n_samples),       # Gamma
        np.random.chisquare(3, n_samples),      # Chi-squared
        np.random.normal(1, 0.5, n_samples),    # Shifted normal
        np.random.uniform(-3, 3, n_samples),    # Wider uniform
        np.random.normal(0, 2, n_samples),      # Higher variance normal
        np.random.logistic(0, 1, n_samples),    # Logistic
        np.random.rayleigh(1, n_samples),       # Rayleigh
        np.random.poisson(3, n_samples),        # Poisson
        np.random.geometric(0.3, n_samples),    # Geometric
        np.random.weibull(1.5, n_samples)       # Weibull
    ])

    # Extremely complex target with multiple non-linear transformations
    complex_target = (
        # Trigonometric interactions
        np.sin(X[:, 0] * X[:, 1]) * np.cos(X[:, 2]) +
        np.tan(X[:, 3] * 0.5) * np.arctan(X[:, 4]) +

        # Exponential and logarithmic interactions
        np.exp(X[:, 5] * 0.3) * np.log1p(np.abs(X[:, 6])) +
        np.power(X[:, 7], 2) * np.sqrt(np.abs(X[:, 8])) +

        # Polynomial interactions (high degree)
        X[:, 0]**3 * X[:, 1]**2 +
        X[:, 2]**4 * X[:, 3] +
        X[:, 4]**2 * X[:, 5]**3 +

        # Conditional relationships
        np.where(X[:, 6] > 0, X[:, 7]**2, -X[:, 7]**2) +
        np.where(X[:, 8] < 0, np.sin(X[:, 9]), np.cos(X[:, 9])) +

        # Multi-feature interactions
        X[:, 0] * X[:, 1] * X[:, 2] +
        X[:, 3] * X[:, 4] * X[:, 5] +
        X[:, 6] * X[:, 7] * X[:, 8] +

        # Complex periodic patterns
        np.sin(X[:, 0] * 2 * np.pi) * np.cos(X[:, 1] * 3 * np.pi) +
        np.sin(X[:, 2] + X[:, 3]) * np.cos(X[:, 4] - X[:, 5]) +

        # Saturation effects
        np.tanh(X[:, 9] * 2) * np.arctan(X[:, 10] * 3) +

        # Piecewise relationship with multiple breakpoints
        np.piecewise(X[:, 11],
                     [X[:, 11] < -1, (X[:, 11] >= -1) & (X[:, 11] < 1), X[:, 11] >= 1],
                     [lambda x: -x**2, lambda x: x**3, lambda x: np.sqrt(x)]) +

        # Mixed feature combinations
        X[:, 12] * np.sin(X[:, 13]) * np.cos(X[:, 14]) +
        X[:, 13] * np.tanh(X[:, 12]) * np.arctan(X[:, 11]) +

        # Highly non-linear transformations
        np.log1p(np.abs(X[:, 0] * X[:, 1] * X[:, 2])) +
        np.exp(np.sin(X[:, 3]) + np.cos(X[:, 4])) +

        # Interaction with feature products
        (X[:, 5] * X[:, 6]) / (1 + np.abs(X[:, 7] * X[:, 8])) +
        np.sin(X[:, 9] * X[:, 10]) * np.cos(X[:, 11] * X[:, 12])
    )

    # Add significant noise, scaled to the target's standard deviation
    y = complex_target + np.random.normal(0, noise_level * np.std(complex_target), n_samples)

    return X, y

def create_extended_dataset(n_samples=500000, random_state=42):
    """Extend the base dataset with 5 moderate and 5 simple numeric features plus 15 categoricals."""
    if random_state is not None:
        np.random.seed(random_state)

    # Original 15 complex features
    X_original, y = create_highly_complex_dataset(n_samples, random_state=random_state)

    # 5 numeric features with moderate complexity
    X_moderate = np.column_stack([
        # Moderate complexity features - some non-linear relationships
        np.sin(X_original[:, 0] * 0.5) + np.cos(X_original[:, 1] * 0.3),
        np.log1p(np.abs(X_original[:, 2] * X_original[:, 3])),
        np.tanh(X_original[:, 4] * 0.7) * np.arctan(X_original[:, 5] * 0.4),
        np.sqrt(np.abs(X_original[:, 6])) + X_original[:, 7] * 0.2,
        np.exp(X_original[:, 8] * 0.1) - np.exp(X_original[:, 9] * -0.1)
    ])

    # 5 simple numeric features
    X_simple = np.column_stack([
        np.random.normal(0, 1, n_samples),
        np.random.uniform(-1, 1, n_samples),
        np.random.exponential(0.5, n_samples),
        np.random.normal(0.5, 0.3, n_samples),
        np.random.beta(1, 1, n_samples)
    ])

    # 15 categorical features with different numbers of categories
    categorical_features = []
    n_categories_list = [2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 15, 20, 25, 30, 50]

    for n_categories in n_categories_list:
        # Generate categorical features with different distributions
        if n_categories <= 5:
            # More balanced categories
            cat_feature = np.random.randint(0, n_categories, n_samples)
        else:
            # Some categories more frequent than others
            probs = np.random.dirichlet(np.ones(n_categories) * 2)
            cat_feature = np.random.choice(n_categories, n_samples, p=probs)

        categorical_features.append(cat_feature)

    X_categorical = np.column_stack(categorical_features)

    # Combine all features: 15 complex + 5 moderate + 5 simple + 15 categorical = 40 columns
    X_combined = np.column_stack([X_original, X_moderate, X_simple, X_categorical])

    y_updated = y + (
        # Add some moderate influence from the new numeric features
        X_moderate[:, 0] * 0.3 +
        X_moderate[:, 1] * 0.2 +
        X_simple[:, 0] * 0.1 +
        X_simple[:, 1] * 0.05 +
        # Add some categorical influence (using the first three categorical features)
        (X_categorical[:, 0] / n_categories_list[0]) * 0.4 +
        (X_categorical[:, 1] / n_categories_list[1]) * 0.3 +
        (X_categorical[:, 2] / n_categories_list[2]) * 0.2
    )

    return X_combined, y_updated
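

if __name__ == "__main__":
    # Minimal usage sketch. The smaller n_samples here is an illustrative
    # assumption to keep the run fast; both generators default to 500000.
    X, y = create_extended_dataset(n_samples=10_000, random_state=42)
    print(f"X shape: {X.shape}")  # (10000, 40): 15 complex + 5 moderate + 5 simple + 15 categorical
    print(f"y shape: {y.shape}")
    print(f"target mean={y.mean():.3f}, std={y.std():.3f}")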