# -*- coding: utf-8 -*-
"""CIC_FS_GAN_class.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1g7dDDwvbq0wtXf4dxuqgLDesUbUMnudi
"""
  9.  
  10. from datetime import datetime
  11.  
  12. start_time = datetime.now()
  13.  
  14.  
  15. # from google.colab import drive
  16. # drive.mount('/content/drive')
  17.  
  18. import pandas as pd
  19. import numpy as np
  20. data =pd.read_csv('CICIoT2023_part-00094_full_data.csv')
  21.  
  22. data.head()
  23.  
  24. data.shape
  25.  
  26. data = data.dropna()
  27. data.shape
  28.  
  29. features = list(data.columns[:-1])
  30.  
  31. print(features)
  32.  
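# Before encoding, it can help to eyeball the label distribution; a small
# inspection sketch (assuming the label is the last column, as above):
print(data[data.columns[-1]].value_counts())
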
from sklearn.preprocessing import LabelEncoder, MinMaxScaler

# Identify columns with object (string) data type
categorical_cols = data.select_dtypes(include=['object']).columns

# Create a LabelEncoder for each categorical column
label_encoders = {}
for col in categorical_cols:
    le = LabelEncoder()
    data[col] = le.fit_transform(data[col])
    label_encoders[col] = le


# Identify columns to be scaled (excluding categorical columns)
numerical_cols = data.columns.difference(categorical_cols)

# Create a MinMaxScaler
scaler = MinMaxScaler()

# Apply scaling to numerical columns
scaled_data = scaler.fit_transform(data[numerical_cols])

# Combine the scaled numerical data and the encoded categorical data
scaled_df = pd.DataFrame(scaled_data, columns=numerical_cols, index=data.index)
encoded_df = data[categorical_cols]

processed_data = pd.concat([scaled_df, encoded_df], axis=1)

features = list(processed_data.columns[:-1])
X = processed_data[features].values
y = data.iloc[:, -1].values

processed_data.to_csv('cic-2023_pre-processed_wed_cleaned_output.csv', index=False)

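# The label column was integer-encoded above; a minimal sketch of mapping
# encoded labels back to the original class names (assuming the target is the
# last, categorical, column and its encoder is stored in label_encoders):
target_col = data.columns[-1]
if target_col in label_encoders:
    print(label_encoders[target_col].inverse_transform(y[:5].astype(int)))
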
Data_fraction = 0.1  # fraction of rows to keep (0.1 = a 10% sample)
MAX_ITER = 10
df = processed_data.sample(frac=Data_fraction, random_state=42)


df.to_csv('cic-2023_pre-processed_wed_cleaned_output_1pc.csv', index=False)

X = df[features].values
y = df.iloc[:, -1].values

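# Random subsampling can under-represent rare attack classes; a quick
# sanity-check sketch of the sampled class balance:
labels, counts = np.unique(y, return_counts=True)
print(dict(zip(labels, counts)))
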
import random
import time
import csv
import numpy as np
from joblib import Parallel, delayed
from scipy.optimize import differential_evolution
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

"""**Cuckoo Search**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

class CuckooSearch:
    def __init__(self, X, y, pop_size=20, max_iter=10, pa=0.25, Lambda=1.5):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.pa = pa            # Fraction of nests abandoned each iteration
        self.Lambda = Lambda    # Levy-flight exponent
        self.num_features = X.shape[1]
        self.population = self.generate_initial_population()
        self.fitness_values = np.zeros(pop_size)

    def levy_flight(self):
        # Levy-distributed step as a ratio of normals (Mantegna-style)
        u = np.random.normal(0, 1, size=1)
        v = np.random.normal(0, 1, size=1)
        step = u / np.power(np.abs(v), 1 / self.Lambda)
        return step

    def fitness_function(self, X_train, X_test, y_train, y_test, solution):
        selected_features = np.where(solution == 1)[0]
        selected_features = selected_features[selected_features < X_train.shape[1]]

        if len(selected_features) == 0:
            return 0  # Worst fitness (accuracy) if no features are selected

        model = KNeighborsClassifier()
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return accuracy_score(y_test, y_pred)

    def generate_initial_population(self):
        return np.random.randint(2, size=(self.pop_size, self.num_features))

    def get_best_solution(self):
        best_index = np.argmax(self.fitness_values)
        return self.population[best_index], self.fitness_values[best_index]

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        for i in range(self.pop_size):
            self.fitness_values[i] = self.fitness_function(X_train, X_test, y_train, y_test, self.population[i])

        best_solution, best_fitness = self.get_best_solution()

        for iteration in range(self.max_iter):
            new_population = self.population.copy()

            for i in range(self.pop_size):
                # Perturb the nest with a Levy step, then re-binarize stochastically
                cuckoo = self.population[i] + self.levy_flight()
                cuckoo = np.clip(cuckoo, 0, 1) > np.random.random(self.num_features)

                fitness_cuckoo = self.fitness_function(X_train, X_test, y_train, y_test, cuckoo)

                if fitness_cuckoo > self.fitness_values[i]:
                    new_population[i] = cuckoo
                    self.fitness_values[i] = fitness_cuckoo

            # Abandon a fraction pa of nests and replace them with fresh random ones
            abandon_indices = np.random.rand(self.pop_size) < self.pa
            new_population[abandon_indices] = self.generate_initial_population()[abandon_indices]

            for i in np.where(abandon_indices)[0]:
                self.fitness_values[i] = self.fitness_function(X_train, X_test, y_train, y_test, new_population[i])

            self.population = new_population
            best_solution, best_fitness = self.get_best_solution()

            print(f"Iteration {iteration + 1}, Best Fitness: {best_fitness}")

        # Map the best binary mask back to the global `features` name list
        selected_indices = np.where(best_solution == 1)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

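# A minimal usage sketch, assuming the X, y arrays and the global `features`
# list built above. All optimizer classes below expose the same
# (X, y, pop_size, max_iter, ...) constructor and search() interface:
cs = CuckooSearch(X, y, pop_size=20, max_iter=MAX_ITER)
cs_selected = cs.search()
print(f"CuckooSearch kept {len(cs_selected)} features:", cs_selected)
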
"""**Evolutionary Programming**"""

import numpy as np
import time
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

class EvolutionaryProgramming:
    def __init__(self, X, y, pop_size=20, max_iter=10, mutation_rate=0.1):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.mutation_rate = mutation_rate
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]

        if len(selected_features) == 0:
            return 1  # Return worst fitness if no features are selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)  # Simplified classifier
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        # Minimize the negative accuracy
        return -accuracy_score(y_test, y_pred)

    def mutate(self, solution):
        # Flip bits based on the mutation rate
        mutation = np.random.rand(len(solution)) < self.mutation_rate
        return np.where(mutation, 1 - solution, solution)

    def search(self):
        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random binary solutions)
        population = np.random.randint(0, 2, size=(self.pop_size, self.X.shape[1]))

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # Evolutionary Programming Loop
        for generation in range(self.max_iter):
            # Evaluate fitness of the current population
            fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

            # Select parents (top 50% individuals)
            num_parents = self.pop_size // 2
            sorted_indices = np.argsort(fitness_scores)
            parents = population[sorted_indices[:num_parents]]

            # Mutate offspring (just enough of them to keep the population size constant)
            offspring = np.array([self.mutate(parents[np.random.randint(num_parents)]) for _ in range(self.pop_size - num_parents)])

            # Combine parents and offspring to form the new population
            population = np.vstack((parents, offspring))

        # Re-evaluate the final population so the scores match the returned solutions
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # End time
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total time taken for optimization: {total_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # The solution itself (binary array)

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Firefly Optimization**"""

import numpy as np
import time
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

class Firefly:
    def __init__(self, X, y, pop_size=20, max_iter=10, alpha=0.2, beta_min=0.2, gamma=1.0):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.alpha = alpha        # Randomization weight
        self.beta_min = beta_min  # Minimum attractiveness
        self.gamma = gamma        # Light-absorption coefficient
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]

        if len(selected_features) == 0:
            return 1  # Return worst fitness if no features are selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)  # Simplified classifier
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        # Minimize the negative accuracy
        return -accuracy_score(y_test, y_pred)

    def move_firefly(self, firefly_i, firefly_j, beta):
        random_factor = self.alpha * (np.random.rand(len(firefly_i)) - 0.5)
        new_position = firefly_i + beta * (firefly_j - firefly_i) + random_factor
        return np.clip(new_position, 0, 1)  # Ensure solution is in [0, 1]

    def search(self):
        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        population = np.random.rand(self.pop_size, self.X.shape[1])

        # Compute initial fitness for the population
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # Firefly Optimization Loop
        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                for j in range(self.pop_size):
                    if fitness_scores[j] < fitness_scores[i]:  # Firefly j is more attractive
                        # Attractiveness decays with squared distance: beta_min * exp(-gamma * r^2)
                        beta = self.beta_min * np.exp(-self.gamma * np.linalg.norm(population[i] - population[j]) ** 2)
                        population[i] = self.move_firefly(population[i], population[j], beta)

                        # Recalculate fitness after moving
                        fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        # End time
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total time taken for optimization: {total_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Adaptive Bacterial Foraging Optimization**"""

import numpy as np
import time
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

class AdaptiveBacterialForaging:
    def __init__(self, X, y, pop_size=20, max_iter=10, C=0.1, elimination_prob=0.1, reproduction_prob=0.5):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.C = C  # Chemotactic step size
        self.elimination_prob = elimination_prob
        self.reproduction_prob = reproduction_prob
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]

        if len(selected_features) == 0:
            return 1  # Return worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        # Minimize the negative accuracy
        return -accuracy_score(y_test, y_pred)

    def chemotaxis(self, bacteria, fitness_scores, X_train, X_test, y_train, y_test):
        for i in range(len(bacteria)):
            step = self.C * np.random.randn(bacteria.shape[1])
            new_bacteria = bacteria[i] + step
            new_bacteria = np.clip(new_bacteria, 0, 1)  # Ensure solution remains in [0, 1]

            # Calculate fitness for the new solution
            new_fitness = self.fitness_function(new_bacteria, X_train, X_test, y_train, y_test)
            if new_fitness < fitness_scores[i]:  # If fitness improves, update bacteria position
                bacteria[i] = new_bacteria
                fitness_scores[i] = new_fitness

        return bacteria, fitness_scores

    def reproduction(self, bacteria, fitness_scores):
        # Sort bacteria by fitness and select the better half
        sorted_indices = np.argsort(fitness_scores)
        bacteria = bacteria[sorted_indices]
        fitness_scores = fitness_scores[sorted_indices]

        # Replace the worst half by cloning the better half
        for i in range(len(bacteria) // 2):
            bacteria[-(i+1)] = bacteria[i]
            fitness_scores[-(i+1)] = fitness_scores[i]

        return bacteria, fitness_scores

    def elimination_dispersal(self, bacteria, fitness_scores, X_train, X_test, y_train, y_test):
        for i in range(len(bacteria)):
            if np.random.rand() < self.elimination_prob:
                # Replace the bacterium with a new random solution
                bacteria[i] = np.random.rand(bacteria.shape[1])
                fitness_scores[i] = self.fitness_function(bacteria[i], X_train, X_test, y_train, y_test)

        return bacteria, fitness_scores

    def search(self):
        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        bacteria = np.random.rand(self.pop_size, self.X.shape[1])

        # Compute initial fitness for the population
        fitness_scores = np.array([self.fitness_function(bac, X_train, X_test, y_train, y_test) for bac in bacteria])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # ABFO Loop
        for generation in range(self.max_iter):
            # Chemotaxis
            bacteria, fitness_scores = self.chemotaxis(bacteria, fitness_scores, X_train, X_test, y_train, y_test)

            # Reproduction
            if np.random.rand() < self.reproduction_prob:
                bacteria, fitness_scores = self.reproduction(bacteria, fitness_scores)

            # Elimination and Dispersal
            bacteria, fitness_scores = self.elimination_dispersal(bacteria, fitness_scores, X_train, X_test, y_train, y_test)

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        # End time
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total time taken for optimization: {total_time:.2f} seconds")

        # Return the names of the best selected features
        best_solution = bacteria[np.argmin(fitness_scores)]
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Ant Colony Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class AntColony:
    def __init__(self, X, y, pop_size=20, max_iter=10, alpha=1.0, beta=1.0, decay=0.1):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.alpha = alpha  # Pheromone influence exponent
        self.beta = beta    # Reserved for heuristic influence (unused here)
        self.decay = decay  # Pheromone evaporation rate
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]

        if len(selected_features) == 0:
            return 1  # Return worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        # Minimize the negative accuracy
        return -accuracy_score(y_test, y_pred)

    # Function to initialize pheromone matrix
    def initialize_pheromone_matrix(self, initial_pheromone=0.1):
        return np.ones(self.features) * initial_pheromone

    # Function to choose features based on pheromone values
    def select_features(self, pheromone):
        probabilities = pheromone ** self.alpha
        probabilities /= np.sum(probabilities)
        return np.random.rand(len(pheromone)) < probabilities

    # Function to update pheromone matrix
    def update_pheromone(self, pheromone, best_solution):
        pheromone *= (1 - self.decay)  # Evaporation
        pheromone += best_solution  # Reinforce pheromone on the best solution
        return pheromone

    # Ant Colony Optimization for feature selection
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize pheromone matrix for all features
        pheromone = self.initialize_pheromone_matrix()

        # Start time to monitor the timing of each generation
        start_time = time.time()

        best_solution = None
        best_fitness = float('inf')

        # ACO Loop
        for generation in range(self.max_iter):
            population = np.zeros((self.pop_size, self.features))
            fitness_scores = np.zeros(self.pop_size)

            # Each ant constructs a solution
            for i in range(self.pop_size):
                # Ant selects features based on pheromone trail
                solution = self.select_features(pheromone)
                population[i] = solution

                # Calculate fitness for the constructed solution
                fitness_scores[i] = self.fitness_function(solution, X_train, X_test, y_train, y_test)

                # Update best solution if necessary
                if fitness_scores[i] < best_fitness:
                    best_fitness = fitness_scores[i]
                    best_solution = solution

            # Update pheromone matrix based on the best solution found so far
            pheromone = self.update_pheromone(pheromone, best_solution)

            # Track progress
            print(f"Generation {generation_counter}: Best fitness = {-best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total time taken for optimization: {total_time:.2f} seconds")

        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Artificial Bee Colony Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class ArtificialBeeColony:
    def __init__(self, X, y, pop_size=20, max_iter=10, limit=5):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.limit = limit  # Max consecutive non-improvements before a scout restart
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]

        if len(selected_features) == 0:
            return 1  # Return worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        # Minimize the negative accuracy
        return -accuracy_score(y_test, y_pred)

    # ABC Optimization process
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        population = np.random.rand(self.pop_size, self.features)

        # Initialize fitness scores and per-source trial counters
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])
        trials = np.zeros(self.pop_size, dtype=int)

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # ABC Optimization Loop
        for generation in range(self.max_iter):
            # Employed bees search for new solutions
            for i in range(self.pop_size):
                # Choose a random feature to change
                new_solution = population[i].copy()
                random_feature = np.random.randint(0, self.features)
                new_solution[random_feature] = 1 - new_solution[random_feature]  # Flip the feature selection

                # Evaluate the new solution
                new_fitness = self.fitness_function(new_solution, X_train, X_test, y_train, y_test)

                # Greedily replace the old solution if the new one is better
                if new_fitness < fitness_scores[i]:
                    population[i] = new_solution
                    fitness_scores[i] = new_fitness
                    trials[i] = 0
                else:
                    trials[i] += 1

            # Onlooker bees select solutions based on fitness
            probabilities = 1 / (1 + fitness_scores)
            probabilities /= np.sum(probabilities)  # Normalize probabilities

            for i in range(self.pop_size):
                if np.random.rand() < probabilities[i]:  # Select this solution
                    new_solution = population[i].copy()
                    random_feature = np.random.randint(0, self.features)
                    new_solution[random_feature] = 1 - new_solution[random_feature]  # Flip the feature selection

                    # Evaluate the new solution
                    new_fitness = self.fitness_function(new_solution, X_train, X_test, y_train, y_test)

                    # Greedily replace if the new one is better
                    if new_fitness < fitness_scores[i]:
                        population[i] = new_solution
                        fitness_scores[i] = new_fitness
                        trials[i] = 0
                    else:
                        trials[i] += 1

            # Scout bees restart solutions that have not improved for `limit` trials
            for i in range(self.pop_size):
                if trials[i] >= self.limit:
                    population[i] = np.random.rand(self.features)  # Restart solution
                    fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)
                    trials[i] = 0

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total time taken for optimization: {total_time:.2f} seconds")

        # Return the best solution found
        best_solution = population[np.argmin(fitness_scores)]
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Sine Cosine Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class SineCosine:
    def __init__(self, X, y, pop_size=20, max_iter=10, a=2):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.a = a  # Amplitude-control constant (unused in this simplified variant)
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]

        if len(selected_features) == 0:
            return 1  # Return worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        # Minimize the negative accuracy
        return -accuracy_score(y_test, y_pred)

    # Sine Cosine Optimization process
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        population = np.random.rand(self.pop_size, self.features)

        # Compute initial fitness for the population
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # SCO Optimization Loop
        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Generate a new solution
                new_solution = np.zeros_like(population[i])

                for j in range(self.features):
                    # Mix a sine-weighted copy of the current value with a
                    # cosine-weighted pull toward the population mean
                    r1 = np.random.rand()
                    r2 = np.random.rand()
                    new_solution[j] = (np.sin(r1 * np.pi) * population[i][j] +
                                       np.cos(r2 * np.pi) * (np.mean(population[:, j]) - population[i][j]))

                    # Ensure the solution is in [0, 1]
                    new_solution[j] = np.clip(new_solution[j], 0, 1)

                # Evaluate the new solution
                new_fitness = self.fitness_function(new_solution, X_train, X_test, y_train, y_test)

                # Greedily replace the old solution if the new one is better
                if new_fitness < fitness_scores[i]:
                    population[i] = new_solution
                    fitness_scores[i] = new_fitness

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total time taken for optimization: {total_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Social Spider Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class SocialSpider:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        return -accuracy_score(y_test, y_pred)  # Minimize the negative accuracy

    # Social Spider Optimization process
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        population = np.random.rand(self.pop_size, self.features)

        # Compute initial fitness for the population
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # SSO Optimization Loop
        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                best_spider = np.argmin(fitness_scores)

                # Update position: pull toward the best spider or the swarm mean
                for j in range(self.features):
                    r1 = np.random.rand()
                    r2 = np.random.rand()
                    if np.random.rand() < 0.5:
                        new_value = population[i][j] + r1 * (population[best_spider][j] - population[i][j])
                    else:
                        new_value = population[i][j] + r2 * (np.mean(population, axis=0)[j] - population[i][j])

                    # Ensure the new value is within [0, 1]
                    population[i][j] = np.clip(new_value, 0, 1)

                # Evaluate the new solution
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Symbiotic Organisms Search Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class Symbiotic:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        return -accuracy_score(y_test, y_pred)  # Minimize the negative accuracy

    # Symbiotic Organisms Search Optimization process
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        population = np.random.rand(self.pop_size, self.features)

        # Compute initial fitness for the population
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # SOS Optimization Loop
        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Pick a random partner organism other than i
                selected_index = np.random.choice(self.pop_size)
                while selected_index == i:
                    selected_index = np.random.choice(self.pop_size)

                # Mimic symbiotic behavior: move the current organism toward its partner
                population[i] += np.random.rand(self.features) * (population[selected_index] - population[i])

                # Clip to ensure values are within bounds
                population[i] = np.clip(population[i], 0, 1)

                # Evaluate the updated solution
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Bacterial Foraging Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class BacterialForaging:
    def __init__(self, X, y, pop_size=20, max_iter=10, num_steps=10, step_size=0.1):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.num_steps = num_steps  # Chemotactic steps per bacterium per generation
        self.step_size = step_size
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Worst fitness if no features selected

        # Train and test classifier with the selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        return -accuracy_score(y_test, y_pred)  # Minimize the negative accuracy

    # Bacterial Foraging Optimization process
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population (random continuous solutions in [0, 1])
        population = np.random.rand(self.pop_size, self.features)

        # Compute initial fitness for the population
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # BFO Optimization Loop
        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                for step in range(self.num_steps):
                    # Randomly adjust the bacterium's position
                    previous_position = population[i].copy()
                    population[i] += (np.random.rand(self.features) - 0.5) * self.step_size

                    # Clip to ensure values are within bounds
                    population[i] = np.clip(population[i], 0, 1)
                    new_fitness = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

                    # If new fitness is better, keep the new position; else, revert
                    if new_fitness < fitness_scores[i]:
                        fitness_scores[i] = new_fitness
                    else:
                        population[i] = previous_position  # Revert to previous position

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5

        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Bat Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class Bat:
    def __init__(self, X, y, pop_size=20, max_iter=10, alpha=0.9, gamma=1.0):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.alpha = alpha  # Loudness decay (unused in this simplified variant)
        self.gamma = gamma  # Pulse-rate growth (unused in this simplified variant)
        self.features = X.shape[1]

    # Fitness function for feature selection
    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Worst fitness if no features selected

        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])

        return -accuracy_score(y_test, y_pred)  # Minimize the negative accuracy

    # Bat Optimization process
    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize population and velocities
        population = np.random.rand(self.pop_size, self.features)
        velocities = np.zeros_like(population)

        # Compute initial fitness
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the timing of each generation
        start_time = time.time()

        # Bat Optimization Loop
        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                r = np.random.rand()
                if r > 0.5:
                    # Adjust the velocity toward a random bat and update the position
                    velocities[i] += (population[np.random.randint(self.pop_size)] - population[i]) * np.random.rand()
                    population[i] += velocities[i]

                # Ensure values are within [0, 1]
                population[i] = np.clip(population[i], 0, 1)

                # Calculate the fitness of the new solution
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            # Track progress
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        # Return the best solution found
        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5

        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Big Bang Big Crunch**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class BigBangBigCrunch:
    def __init__(self, X, y, pop_size=20, max_iter=10, explosion_rate=0.3):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.explosion_rate = explosion_rate  # Probability of a full re-initialization
        self.features = X.shape[1]

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Worst fitness if no features are selected

        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)  # Minimize the negative accuracy

    def search(self):
        global generation_counter
        generation_counter = 0

        # Split the data into training and test sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize the population and compute initial fitness
        population = np.random.rand(self.pop_size, self.features)
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        # Start time to monitor the duration of the optimization
        start_time = time.time()

        # Big Bang-Big Crunch Optimization Loop
        for generation in range(self.max_iter):
            average_fitness = np.mean(fitness_scores)

            # Big Bang (explosion): randomly re-initialize the population
            if np.random.rand() < self.explosion_rate:
                population = np.random.rand(self.pop_size, self.features)
                fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

            # Big Crunch: perturb each solution, scaled by its fitness gap to the mean
            else:
                for i in range(self.pop_size):
                    population[i] += (np.random.rand(self.features) - 0.5) * (average_fitness - fitness_scores[i])
                    population[i] = np.clip(population[i], 0, 1)
                    fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            # Track progress after this generation's update
            best_fitness = -np.min(fitness_scores)
            print(f"Generation {generation_counter}: Best fitness = {best_fitness}")
            generation_counter += 1

        # End time
        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        # Find and return the best solution found
        best_solution_index = np.argmin(fitness_scores)
        best_solution = population[best_solution_index]
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Biogeography-based Optimization**"""

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
import time

class Biogeography:
    def __init__(self, X, y, pop_size=20, max_iter=10, migration_rate=0.3):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.migration_rate = migration_rate
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Worst fitness if no features are selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < self.migration_rate:
                    # Migrate features from a best-fitness donor, plus Gaussian noise
                    donor_index = np.random.choice(np.flatnonzero(fitness_scores == np.min(fitness_scores)))
                    population[i] = population[donor_index] + np.random.normal(0, 0.1, size=self.X.shape[1])
                    population[i] = np.clip(population[i], 0, 1)
                    fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector, thresholded at 0.5
        # Extract the selected features from the best solution and return them
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

  1187. """**Tug of War Optimization**"""
  1188.  
  1189. import numpy as np
  1190. from sklearn.model_selection import train_test_split
  1191. from sklearn.neighbors import KNeighborsClassifier
  1192. from sklearn.metrics import accuracy_score
  1193. import time
  1194.  
class TugOfWar:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Assign worst fitness if no features are selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < 0.5:
                    # Random restart to introduce diversity (exploration)
                    population[i] = np.random.rand(self.X.shape[1])
                else:
                    # Pull towards the current best solution (exploitation)
                    best_index = np.argmin(fitness_scores)
                    population[i] = population[best_index] + np.random.normal(0, 0.1, size=self.X.shape[1])
                    population[i] = np.clip(population[i], 0, 1)

                # Recalculate fitness for the updated individual
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features
        # Extract the selected feature names from the best solution
        return [features[i] for i in np.where(best_solution > 0.5)[0]]
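
"""Every class here encodes a candidate as a continuous vector in [0, 1] and decodes it into a feature mask by thresholding at 0.5. A tiny worked example of that decoding step (the vector below is made up):"""

demo_solution = np.array([0.9, 0.2, 0.51, 0.5])
demo_mask = np.where(demo_solution > 0.5)[0]
print(demo_mask)  # [0 2] -- exactly 0.5 is excluded because the comparison is strict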

"""**Water Cycle Optimization**"""

class WaterCycle:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Assign worst fitness if no features are selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < 0.5:
                    # Water movement - introduce randomness
                    population[i] = np.random.rand(self.X.shape[1])
                else:
                    # Flow towards the best solution found so far
                    best_index = np.argmin(fitness_scores)
                    population[i] = population[best_index] + np.random.normal(0, 0.1, size=self.X.shape[1])
                    population[i] = np.clip(population[i], 0, 1)

                # Update fitness for the new solution
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Whale Optimization**"""

class WhaleOptimization:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Assign worst fitness if no features are selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                best_index = np.argmin(fitness_scores)
                r = np.random.rand()
                A = 2 * np.random.rand() - 1  # A in [-1, 1]
                C = 2 * np.random.rand()      # C in [0, 2]

                if r < 0.5:
                    # Subtractive step relative to the best whale (encircling)
                    population[i] = population[best_index] - A * np.abs(C * population[best_index] - population[i])
                else:
                    # Additive step relative to the best whale (searching)
                    population[i] = population[best_index] + A * np.abs(C * population[best_index] - population[i])

                # Ensure values remain in the range [0, 1]
                population[i] = np.clip(population[i], 0, 1)
                # Update fitness score for the new solution
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        return [features[i] for i in np.where(best_solution > 0.5)[0]]
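
"""In the whale update above, `A` is drawn from [-1, 1] and scales the step relative to the best whale, while `C` is drawn from [0, 2] and perturbs the attractor before the distance is taken. One update step on toy 2-D values (illustrative numbers only):"""

best, whale = np.array([0.8, 0.3]), np.array([0.2, 0.6])
A, C = 0.5, 1.2
new_whale = np.clip(best - A * np.abs(C * best - whale), 0, 1)
print(new_whale)  # [0.42 0.18]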

"""**Whale Swarm Optimization**"""

class WhaleSwarmOptimization:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Assign worst fitness if no features are selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                r = np.random.rand()
                if r < 0.5:
                    # Step guided by the current best whale
                    best_index = np.argmin(fitness_scores)
                    A = np.random.rand()
                    population[i] = population[best_index] + A * np.abs(population[best_index] - population[i])
                else:
                    # Step guided by the current worst whale
                    worst_index = np.argmax(fitness_scores)
                    A = np.random.rand()
                    population[i] = population[worst_index] - A * np.abs(population[worst_index] - population[i])

                # Ensure values remain in the range [0, 1]
                population[i] = np.clip(population[i], 0, 1)
                # Update fitness score for the new solution
                fitness_scores[i] = self.fitness_function(population[i], X_train, X_test, y_train, y_test)

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        return [features[i] for i in np.where(best_solution > 0.5)[0]]

"""**Cat Swarm Optimization**"""

class CatSwarmOptimizer:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < 0.5:
                    # Exploration phase: resample the position at random
                    self.population[i] = np.random.rand(self.X.shape[1])
                else:
                    # Exploitation phase: perturb the current best solution
                    best_index = np.argmin(fitness_scores)
                    self.population[i] = self.population[best_index] + np.random.normal(0, 0.1, size=self.X.shape[1])
                    self.population[i] = np.clip(self.population[i], 0, 1)  # Keep values within [0, 1]

                # Update fitness score for the new solution
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
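
"""None of these optimizers seeds NumPy's global random generator, so repeated runs select different feature subsets even though the train/test split itself is pinned by `random_state=42`. A reproducible run can be sketched as follows (seeding the global generator is an assumption of this sketch, since all classes draw from `np.random`):"""

np.random.seed(0)  # fixes the initial population and every update step
cso = CatSwarmOptimizer(X, y, pop_size=20, max_iter=MAX_ITER)
cso_selected = cso.search()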

"""**Chicken Swarm Optimization**"""

class ChickenSwarmOptimizer:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < 0.5:
                    # Exploration phase: resample the position at random
                    self.population[i] = np.random.rand(self.X.shape[1])
                else:
                    # Exploitation phase: perturb the current best solution
                    best_index = np.argmin(fitness_scores)
                    self.population[i] = self.population[best_index] + np.random.normal(0, 0.1, size=self.X.shape[1])
                    self.population[i] = np.clip(self.population[i], 0, 1)  # Keep values within [0, 1]

                # Update fitness score for the new solution
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Clonal Selection**"""

class ClonalSelectionOptimizer:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            best_index = np.argmin(fitness_scores)  # Index of the best solution
            for i in range(self.pop_size):
                # Cloning and mutation
                if i == best_index:
                    # Hypermutate the best antibody with Gaussian noise
                    self.population[i] += np.random.normal(0, 0.1, size=self.X.shape[1])
                else:
                    # Move other antibodies a random fraction of the way towards the best
                    self.population[i] += np.random.rand(self.X.shape[1]) * (self.population[best_index] - self.population[i])

                self.population[i] = np.clip(self.population[i], 0, 1)  # Keep values within [0, 1]
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
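
"""In the clonal step above, the best antibody is hypermutated with Gaussian noise while every other antibody moves a random per-dimension fraction of the way towards it. One such step on toy vectors (made-up values):"""

best_ab, other_ab = np.array([0.9, 0.1]), np.array([0.2, 0.8])
moved = other_ab + np.random.rand(2) * (best_ab - other_ab)  # lands between the two, per dimension
print(moved)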

"""**Coral Reefs Optimization**"""

class CoralReefsOptimizer:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            best_index = np.argmin(fitness_scores)  # Index of the best solution
            for i in range(self.pop_size):
                # Spawn each coral as a Gaussian perturbation of the best coral
                self.population[i] = self.population[best_index] + np.random.normal(0, 0.1, size=self.X.shape[1])
                self.population[i] = np.clip(self.population[i], 0, 1)  # Keep values within [0, 1]

                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Firework Optimization**"""

class FireworkOptimization:
    def __init__(self, X, y, pop_size=20, max_iter=10, explosion_strength=0.2):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.explosion_strength = explosion_strength
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Fitness is negated accuracy (lower is better), so fitter fireworks emit more sparks
                num_sparks = int(self.pop_size * (1 - fitness_scores[i]))
                sparks = np.array([self.population[i] + np.random.normal(0, self.explosion_strength, size=self.X.shape[1]) for _ in range(num_sparks)])

                for spark in sparks:
                    spark = np.clip(spark, 0, 1)  # Keep sparks within [0, 1]
                    fitness = self.fitness_function(spark)

                    # Keep the spark only if it improves on its parent firework
                    if fitness < fitness_scores[i]:
                        self.population[i] = spark
                        fitness_scores[i] = fitness

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
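
"""Because `fitness_function` returns negated accuracy, a valid candidate's score lies in [-1, 0], so `1 - fitness_scores[i]` lies in [1, 2] and each firework emits between `pop_size` and `2 * pop_size` sparks, with more accurate fireworks emitting more. A worked check:"""

demo_pop_size = 20
demo_fitness = -0.95  # i.e. 95% accuracy
print(int(demo_pop_size * (1 - demo_fitness)))  # 39 sparks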

"""**Flower Pollination**"""

class FlowerPollination:
    def __init__(self, X, y, pop_size=20, max_iter=10, p=0.8):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.p = p
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < self.p:
                    # Global pollination: move towards the current best flower
                    best_index = np.argmin(fitness_scores)
                    self.population[i] += np.random.normal(0, 0.1, size=self.X.shape[1]) * (self.population[best_index] - self.population[i])
                else:
                    # Local pollination: move towards a randomly chosen peer
                    j = np.random.randint(self.pop_size)
                    self.population[i] += np.random.normal(0, 0.1, size=self.X.shape[1]) * (self.population[j] - self.population[i])

                self.population[i] = np.clip(self.population[i], 0, 1)
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
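
"""The `p` parameter sets the exploration/exploitation balance: with probability `p` a flower moves towards the current best (global pollination), otherwise towards a random peer (local pollination). A run with a more exploratory setting (illustrative value):"""

fp = FlowerPollination(X, y, pop_size=20, max_iter=MAX_ITER, p=0.5)
fp_selected = fp.search()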

"""**Gravitational Search**"""

class GravitationalSearch:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            total_fitness = np.sum(fitness_scores)
            gravitational_force = (fitness_scores / total_fitness).reshape(-1, 1)
            for i in range(self.pop_size):
                for j in range(self.pop_size):
                    if fitness_scores[j] < fitness_scores[i]:  # Attractive force from better solutions
                        # Epsilon guards against a zero distance when two candidates coincide
                        distance = np.linalg.norm(self.population[i] - self.population[j]) + 1e-12
                        force = gravitational_force[j] / distance
                        self.population[i] += force * (self.population[j] - self.population[i])

                self.population[i] = np.clip(self.population[i], 0, 1)
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
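
"""The small epsilon in the distance above is a defensive fix: if two candidates coincide, `np.linalg.norm` returns 0 and the force would blow up to infinity. A quick illustration of the guard:"""

a = np.array([0.5, 0.5])
b = a.copy()
print(1.0 / (np.linalg.norm(a - b) + 1e-12))  # large but finite (1e12) instead of inf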

"""**Gray Wolf Optimization**"""

class GrayWolfOptimization:
    def __init__(self, X, y, features, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.features = features
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            alpha_index = np.argmin(fitness_scores)
            alpha = self.population[alpha_index].copy()  # Copy so later writes cannot alias the leader
            a = 2 - generation * (2 / self.max_iter)  # a decays linearly from 2 to 0

            for i in range(self.pop_size):
                for j in range(3):  # Three successive steps, all guided by the alpha wolf
                    r = np.random.rand(self.X.shape[1])
                    A = 2 * a * r - a
                    C = 2 * np.random.rand(self.X.shape[1])
                    D = np.abs(C * alpha - self.population[i])
                    self.population[i] = alpha - A * D

                self.population[i] = np.clip(self.population[i], 0, 1)
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names via the feature list passed to the constructor
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [self.features[i] for i in selected_indices]
        return selected_features
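
"""Unlike the other classes, `GrayWolfOptimization` takes the feature-name list as an explicit constructor argument, so a call looks like this (sketch; assumes `X`, `y`, `features`, and `MAX_ITER` are in scope):"""

gwo = GrayWolfOptimization(X, y, features, pop_size=20, max_iter=MAX_ITER)
gwo_selected = gwo.search()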

"""**Green Herons Optimization**"""

class GreenHeronsOptimization:
    def __init__(self, X, y, pop_size=20, max_iter=10, p=0.5):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.p = p
        self.generation_counter = 0
        self.population = np.random.rand(pop_size, X.shape[1])
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                if np.random.rand() < self.p:
                    # Foraging: drift towards the per-dimension maximum of the population
                    self.population[i] += np.random.normal(0, 0.1, size=self.X.shape[1]) * (np.random.rand() * (self.population.max(axis=0) - self.population[i]))
                else:
                    # Predator avoidance: drift towards the per-dimension minimum
                    self.population[i] += np.random.normal(0, 0.1, size=self.X.shape[1]) * (np.random.rand() * (self.population.min(axis=0) - self.population[i]))

                self.population[i] = np.clip(self.population[i], 0, 1)
                fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Grey Wolf Optimizer**"""

class GreyWolfOptimizer:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            # Rank the pack: alpha, beta, and delta are the three best wolves
            order = np.argsort(self.fitness_scores)
            alpha = self.population[order[0]].copy()
            beta = self.population[order[1]].copy()
            delta = self.population[order[2]].copy()

            a = 2 - generation * (2 / self.max_iter)  # a decays linearly from 2 to 0

            for i in range(self.pop_size):
                current = self.population[i].copy()
                # One step per leader, each with its own random coefficients
                leader_steps = []
                for leader in (alpha, beta, delta):
                    r1 = np.random.rand(self.X.shape[1])
                    r2 = np.random.rand(self.X.shape[1])
                    A = 2 * a * r1 - a
                    C = 2 * r2
                    D = np.abs(C * leader - current)
                    leader_steps.append(leader - A * D)

                # The new position is the average of the three leader-guided steps
                self.population[i] = np.clip(np.mean(leader_steps, axis=0), 0, 1)
                self.fitness_scores[i] = self.fitness_function(self.population[i])

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
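
"""For reference, the canonical Grey Wolf update used above, written out for a single wolf against toy 2-D leaders (illustrative values only):"""

alpha_w, beta_w, delta_w = np.array([0.9, 0.1]), np.array([0.8, 0.2]), np.array([0.7, 0.3])
wolf = np.array([0.2, 0.6])
a = 1.0  # midway through the linear decay from 2 to 0
steps = []
for leader in (alpha_w, beta_w, delta_w):
    A = 2 * a * np.random.rand(2) - a
    C = 2 * np.random.rand(2)
    steps.append(leader - A * np.abs(C * leader - wolf))
print(np.clip(np.mean(steps, axis=0), 0, 1))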

"""**Harmony Search**"""

class HarmonySearch:
    def __init__(self, X, y, pop_size=20, max_iter=10, harmony_memory_size=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size  # Kept for interface consistency; the memory size drives this search
        self.max_iter = max_iter
        self.harmony_memory_size = harmony_memory_size
        self.generation_counter = 0
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.harmony_memory = np.random.rand(harmony_memory_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.harmony_memory])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            new_harmony = np.zeros(self.X.shape[1])
            for i in range(self.X.shape[1]):
                if np.random.rand() < 0.5:  # Draw the component from harmony memory
                    idx = np.random.randint(0, self.harmony_memory_size)
                    new_harmony[i] = self.harmony_memory[idx, i]
                else:  # Otherwise improvise it at random
                    new_harmony[i] = np.random.rand()

            new_harmony = np.clip(new_harmony, 0, 1)
            new_fitness = self.fitness_function(new_harmony)

            # Replace the worst memory entry if the new harmony improves on it
            if new_fitness < np.max(self.fitness_scores):
                worst_index = np.argmax(self.fitness_scores)
                self.harmony_memory[worst_index] = new_harmony
                self.fitness_scores[worst_index] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.harmony_memory[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features
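
"""Harmony Search improvises a single new solution per iteration and replaces the worst memory entry only when the newcomer is better, so `max_iter` counts improvisations rather than full population sweeps and larger values are comparatively cheap. Usage with illustrative parameters:"""

hs = HarmonySearch(X, y, max_iter=50, harmony_memory_size=10)
hs_selected = hs.search()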

"""**Harris Hawks Optimization**"""

class HarrisHawk:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                r = np.random.rand(self.X.shape[1])
                A = np.random.rand()
                # Step towards a randomly chosen hawk plus a random jump component
                new_position = self.population[i] + A * (self.population[np.random.randint(self.pop_size)] - self.population[i]) + r

                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Greedy acceptance: keep the move only if it improves fitness
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Henry Gas Solubility Optimization**"""

class HenryGasSolubility:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                A = np.random.rand()  # Random step-size coefficient
                # Move towards a randomly chosen peer
                new_position = self.population[i] + A * (self.population[np.random.randint(self.pop_size)] - self.population[i])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Greedy acceptance: keep the move only if it improves fitness
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract the selected feature names from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Invasive Weed Optimization**"""

class InvasiveWeed:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Seed dispersal: Gaussian perturbation of the parent weed
                new_position = self.population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Greedy acceptance: keep the move only if it improves fitness
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Continuous vector; entries > 0.5 mark selected features

        # Extract and return the names of the selected features
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

"""**Krill Herd**"""

# NOTE: same simplified Gaussian-perturbation search as InvasiveWeed above.
class KrillHerd:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Create a new position by perturbing the current position
                new_position = self.population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Update the position if the new fitness is better
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Best position vector (continuous values in [0, 1])

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

# NOTE: same simplified Gaussian-perturbation search as the classes above.
class MothFlame:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Create a new position by perturbing the current position
                new_position = self.population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Update the position if the new fitness is better
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Best position vector (continuous values in [0, 1])

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

# NOTE: despite the name, no non-dominated sorting, crossover, or multi-objective
# ranking is performed; the genetic operators are approximated by the same
# Gaussian perturbation used in the classes above.
class NonDominatedSortingGeneticOptimization:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            # Genetic operations (selection, crossover, mutation) are mimicked
            # by random perturbation rather than implemented in full
            for i in range(self.pop_size):
                new_position = self.population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Update the population if the new fitness is better
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Best position vector (continuous values in [0, 1])

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

class NuclearReactionOptimization:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            # "Nuclear reaction" operations, mimicked by adding Gaussian noise to positions
            for i in range(self.pop_size):
                new_position = self.population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position)

                # Update the population if the new fitness is better
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Best position vector (continuous values in [0, 1])

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

class ParticleSwarm:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None

        # Split the dataset into training and testing sets
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

        # Initialize the population and velocities
        self.population = np.random.rand(pop_size, X.shape[1])
        self.velocities = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])
        self.personal_best = self.population.copy()
        self.personal_best_scores = self.fitness_scores.copy()

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                # Velocity update without an inertia weight; the "global best"
                # is taken as the best member of the current population
                r1, r2 = np.random.rand(), np.random.rand()
                self.velocities[i] += r1 * (self.personal_best[i] - self.population[i]) + r2 * (self.population[np.argmin(self.fitness_scores)] - self.population[i])
                self.population[i] += self.velocities[i]
                self.population[i] = np.clip(self.population[i], 0, 1)

                # Update fitness score for the current particle
                self.fitness_scores[i] = self.fitness_function(self.population[i])

                # Update personal best if necessary
                if self.fitness_scores[i] < self.personal_best_scores[i]:
                    self.personal_best[i] = self.population[i]
                    self.personal_best_scores[i] = self.fitness_scores[i]

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Best position vector (continuous values in [0, 1])

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

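# For reference (added; not part of the original script), the canonical PSO
# velocity update is
#
#     v_i <- w * v_i + c1 * r1 * (pbest_i - x_i) + c2 * r2 * (gbest - x_i)
#     x_i <- x_i + v_i
#
# with inertia weight w and acceleration coefficients c1, c2, and with r1, r2
# drawn per dimension. The class above is a simplified variant that fixes
# w = c1 = c2 = 1, uses scalar r1, r2, and takes `gbest` from the current
# population instead of tracking the best position ever seen.
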
# NOTE: same simplified Gaussian-perturbation search as the classes above.
class Pathfinder:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = None

        # Split the dataset into training and testing sets
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=0.3, random_state=42)

        # Initialize the population
        self.population = np.random.rand(pop_size, X.shape[1])
        self.fitness_scores = np.array([self.fitness_function(ind) for ind in self.population])

    def fitness_function(self, solution):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Penalize solutions with no selected features
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(self.X_train[:, selected_features], self.y_train)
        y_pred = model.predict(self.X_test[:, selected_features])
        return -accuracy_score(self.y_test, y_pred)

    def search(self):
        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                new_position = self.population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)

                # Evaluate new position
                new_fitness = self.fitness_function(new_position)
                if new_fitness < self.fitness_scores[i]:
                    self.population[i] = new_position
                    self.fitness_scores[i] = new_fitness

            best_fitness = -np.min(self.fitness_scores)
            print(f"Generation {self.generation_counter}: Best fitness = {best_fitness}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(self.fitness_scores)  # Index of the best solution
        best_solution = self.population[best_solution_index]  # Best position vector (continuous values in [0, 1])

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

# NOTE: same simplified perturbation search as above, but with the train/test
# split and population kept local to search() instead of the constructor.
class QueuingSearch:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = []

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Return a high penalty for no features selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                new_position = population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position, X_train, X_test, y_train, y_test)
                if new_fitness < fitness_scores[i]:
                    population[i] = new_position
                    fitness_scores[i] = new_fitness  # Keep the score array in sync with the accepted move

            print(f"Generation {self.generation_counter}: Best fitness = {-np.min(fitness_scores)}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Best position vector (continuous values in [0, 1])
        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

class PlusLMinusR:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = []

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Return a high penalty for no features selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                L = np.mean(population, axis=0)  # Population mean position
                R = np.random.rand(self.X.shape[1])  # Random per-dimension step sizes
                new_position = L + R * (population[i] - L)  # Pull each candidate part-way back toward the mean
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position, X_train, X_test, y_train, y_test)
                if new_fitness < fitness_scores[i]:
                    population[i] = new_position
                    fitness_scores[i] = new_fitness  # Keep the score array in sync with the accepted move

            print(f"Generation {self.generation_counter}: Best fitness = {-np.min(fitness_scores)}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Best position vector (continuous values in [0, 1])
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

# NOTE: same simplified Gaussian-perturbation search as QueuingSearch above.
class Sailfish:
    def __init__(self, X, y, pop_size=20, max_iter=10):
        self.X = X
        self.y = y
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.generation_counter = 0
        self.selected_features = []

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Return a high penalty for no features selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()

        for generation in range(self.max_iter):
            for i in range(self.pop_size):
                new_position = population[i] + np.random.normal(0, 0.1, size=self.X.shape[1])
                new_position = np.clip(new_position, 0, 1)
                new_fitness = self.fitness_function(new_position, X_train, X_test, y_train, y_test)
                if new_fitness < fitness_scores[i]:
                    population[i] = new_position
                    fitness_scores[i] = new_fitness  # Keep the score array in sync with the accepted move

            print(f"Generation {self.generation_counter}: Best fitness = {-np.min(fitness_scores)}")
            self.generation_counter += 1

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        best_solution_index = np.argmin(fitness_scores)  # Index of the best solution
        best_solution = population[best_solution_index]  # Best position vector (continuous values in [0, 1])
        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]  # Indices of the selected features
        selected_features = [features[i] for i in selected_indices]
        return selected_features

class ShuffleFrogLeaping:
    def __init__(self, X, y, pop_size=20, max_iter=10, leaping_rate=0.5):
        self.X = X  # Dataset features
        self.y = y  # Dataset labels
        self.pop_size = pop_size
        self.max_iter = max_iter
        self.leaping_rate = leaping_rate
        self.selected_features = None

    def fitness_function(self, solution, X_train, X_test, y_train, y_test):
        selected_features = np.where(solution > 0.5)[0]
        if len(selected_features) == 0:
            return 1  # Return a high penalty for no features selected
        model = KNeighborsClassifier(n_neighbors=3)
        model.fit(X_train[:, selected_features], y_train)
        y_pred = model.predict(X_test[:, selected_features])
        return -accuracy_score(y_test, y_pred)

    def search(self):
        # Split the dataset into training and testing sets
        X_train, X_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.3, random_state=42)

        # Initialize population with random values
        population = np.random.rand(self.pop_size, self.X.shape[1])
        fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        start_time = time.time()
        for iteration in range(self.max_iter):
            # Sort indices based on fitness scores (lower is better)
            sorted_indices = np.argsort(fitness_scores)
            elite_frogs = population[sorted_indices[:self.pop_size // 2]]

            # Randomly "leap" each elite frog to a nearby position
            for i in range(self.pop_size // 2):
                if np.random.rand() < self.leaping_rate:
                    frog = elite_frogs[i]
                    new_frog = frog + np.random.uniform(-0.1, 0.1, size=frog.shape)
                    new_frog = np.clip(new_frog, 0, 1)
                    population[sorted_indices[i]] = new_frog

            # Re-evaluate the whole population after the leaps
            fitness_scores = np.array([self.fitness_function(ind, X_train, X_test, y_train, y_test) for ind in population])

        end_time = time.time()
        print(f"Total time taken for optimization: {end_time - start_time:.2f} seconds")

        # Find the index of the best solution
        best_solution_index = np.argmin(fitness_scores)
        best_solution = population[best_solution_index]

        # Extract the selected features from the best solution
        selected_indices = np.where(best_solution > 0.5)[0]
        selected_features = [features[i] for i in selected_indices]
        return selected_features

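# Note (added): canonical SFLA partitions the population into memeplexes and
# moves each memeplex's worst frog toward its best frog; the class above keeps
# only the "perturb the elite half" flavor of that idea.
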
import os

def feature_selection(X, y, full_data, MAX_ITER, output_dir):
    # Define paths for Data and Summary folders
    data_dir = os.path.join(output_dir, 'Data')
    summary_dir = os.path.join(output_dir, 'Summary')
    print("Welcome")

    # Ensure the output directories exist
    os.makedirs(data_dir, exist_ok=True)
    os.makedirs(summary_dir, exist_ok=True)

    # List of feature selection algorithms to call
    algorithms = [
        CuckooSearch, EvolutionaryProgramming, Firefly, AdaptiveBacterialForaging,
        AntColony, ArtificialBeeColony, SineCosine, SocialSpider, Symbiotic,
        BacterialForaging, Bat, BigBangBigCrunch, Biogeography, TugOfWar,
        WaterCycle, WhaleOptimization, WhaleSwarmOptimization, CatSwarmOptimizer,
        ChickenSwarmOptimizer, ClonalSelectionOptimizer, CoralReefsOptimizer,
        FireworkOptimization, FlowerPollination, GravitationalSearch,
        GrayWolfOptimization, GreenHeronsOptimization, GreyWolfOptimizer,
        HarmonySearch, HarrisHawk, HenryGasSolubility, InvasiveWeed,
        KrillHerd, MothFlame, NonDominatedSortingGeneticOptimization,
        NuclearReactionOptimization, ParticleSwarm, Pathfinder,
        QueuingSearch, PlusLMinusR, Sailfish, ShuffleFrogLeaping
    ]

    # Iterate over each algorithm
    for algorithm in algorithms:
        algorithm_name = algorithm.__name__

        # Instantiate the algorithm class. The selector constructors in this
        # script take (X, y, pop_size=20, max_iter=10), so MAX_ITER must be
        # passed by keyword; passing it positionally would overwrite pop_size.
        selector = algorithm(X, y, max_iter=MAX_ITER)

        # Call the search method to get selected features
        print(algorithm_name)
        selected_features = selector.search()

        # Generate summary CSV: Algorithm name, number of selected features, and feature list
        num_selected_features = len(selected_features)
        summary_data = {
            "Algorithm": [algorithm_name],
            "Number of Selected Features": [num_selected_features],
            "Selected Features": [", ".join(map(str, selected_features))]  # Ensure features are converted to string
        }
        summary_df = pd.DataFrame(summary_data)
        summary_file_path = os.path.join(summary_dir, f"{algorithm_name}_summary.csv")
        summary_df.to_csv(summary_file_path, index=False)

        # Generate selected data CSV: full data restricted to the selected
        # features plus the label column (assumed to be named 'label')
        selected_data_df = full_data[selected_features + ['label']].copy()
        selected_data_file_path = os.path.join(data_dir, f"{algorithm_name}_selected_data.csv")
        selected_data_df.to_csv(selected_data_file_path, index=False)

output_dir = ''  # Write results under the current working directory

X.shape

y.shape

data.shape

feature_selection(X, y, data, MAX_ITER, output_dir)

"""# GAN"""

from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, LeakyReLU, Input, Embedding, Concatenate, Flatten
from tensorflow.keras.optimizers import RMSprop

# Function to create a basic GAN generator model
def create_standard_gan_generator(input_dim, output_dim):
    model = Sequential()
    model.add(Dense(256, input_dim=input_dim))
    model.add(LeakyReLU(alpha=0.2))
    model.add(Dense(512))
    model.add(LeakyReLU(alpha=0.2))
    model.add(Dense(output_dim, activation='tanh'))
    return model

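# Note (added): the generator emits tanh outputs in [-1, 1], while the real
# features were MinMax-scaled to [0, 1]. If generated and real rows are to be
# mixed, either rescale the generated samples or swap the output activation
# to 'sigmoid'.
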
def create_cgan_generator(latent_dim, output_dim, num_classes):
    # Define label input and embedding layer for labels
    label = Input(shape=(1,), name='label_input')
    label_embedding = Embedding(num_classes, latent_dim, input_length=1)(label)  # Embed to match `latent_dim`
    label_embedding = Flatten()(label_embedding)  # Flatten embedding to concatenate

    # Define noise input
    noise = Input(shape=(latent_dim,), name='noise_input')

    # Concatenate noise and label embedding
    combined_input = Concatenate()([noise, label_embedding])  # Shape: (latent_dim + latent_dim,)

    # Build generator model with combined input
    x = Dense(256)(combined_input)
    x = LeakyReLU(alpha=0.2)(x)
    x = Dense(512)(x)
    x = LeakyReLU(alpha=0.2)(x)
    generator_output = Dense(output_dim, activation='tanh')(x)

    # Create the model
    model = Model([noise, label], generator_output)
    return model

# Function to create a Wasserstein GAN (WGAN) generator model.
# The architecture is identical to the standard generator above; what makes a
# WGAN different is the critic and the Wasserstein loss, neither of which is
# defined in this script.
def create_wgan_generator(input_dim, output_dim):
    model = Sequential()
    model.add(Dense(256, input_dim=input_dim))
    model.add(LeakyReLU(alpha=0.2))
    model.add(Dense(512))
    model.add(LeakyReLU(alpha=0.2))
    model.add(Dense(output_dim, activation='tanh'))
    return model

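# Added sketch (assumption, not from the original script): the generators above
# are used with freshly initialized weights and are never trained. For a real
# WGAN, a critic and an adversarial training step are needed; a minimal version
# under those assumptions could look like this. The names `create_wgan_critic`,
# `wgan_train_step`, and `real_batch` are hypothetical; `real_batch` is assumed
# to be a float32 array of real feature rows.
import tensorflow as tf

def create_wgan_critic(input_dim):
    model = Sequential()
    model.add(Dense(256, input_dim=input_dim))
    model.add(LeakyReLU(alpha=0.2))
    model.add(Dense(1))  # Linear score output, not a probability
    return model

def wgan_train_step(generator, critic, real_batch, latent_dim, g_opt, c_opt, clip_value=0.01):
    batch_size = real_batch.shape[0]
    noise = tf.random.normal((batch_size, latent_dim))

    # Critic step: minimize mean(score(fake)) - mean(score(real))
    with tf.GradientTape() as tape:
        fake = generator(noise, training=True)
        c_loss = tf.reduce_mean(critic(fake, training=True)) - tf.reduce_mean(critic(real_batch, training=True))
    c_opt.apply_gradients(zip(tape.gradient(c_loss, critic.trainable_variables), critic.trainable_variables))
    for w in critic.trainable_variables:  # Weight clipping, as in the original WGAN paper
        w.assign(tf.clip_by_value(w, -clip_value, clip_value))

    # Generator step: maximize mean(score(fake))
    with tf.GradientTape() as tape:
        g_loss = -tf.reduce_mean(critic(generator(noise, training=True), training=True))
    g_opt.apply_gradients(zip(tape.gradient(g_loss, generator.trainable_variables), generator.trainable_variables))
    return float(c_loss), float(g_loss)

# Typical optimizers for WGAN with weight clipping (using the RMSprop import above):
#     g_opt, c_opt = RMSprop(5e-5), RMSprop(5e-5)
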
def generate_samples(generator, n_samples, latent_dim, gan_type, num_classes=None, cls=None):
    noise = np.random.normal(0, 1, (n_samples, latent_dim))

    if gan_type == "cGAN" and cls is not None:
        labels = np.full((n_samples, 1), cls)
        generated_samples = generator.predict([noise, labels])
    else:
        generated_samples = generator.predict(noise)

    return generated_samples

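# Example (added for illustration): sample 5 synthetic rows from an untrained
# standard generator with 10 features; values are tanh outputs in [-1, 1].
# The names `_demo_gen` and `_demo_rows` are illustrative only.
_demo_gen = create_standard_gan_generator(100, 10)
_demo_rows = generate_samples(_demo_gen, 5, 100, "StandardGAN")
print(_demo_rows.shape)  # (5, 10)
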
def generate_data_with_gans(data, output_dir, base_name, latent_dim=100, samples_per_class=1000):
    os.makedirs(output_dir, exist_ok=True)
    classes = np.unique(data['label'])  # Assumes the label column is named 'label'
    num_features = data.shape[1] - 1
    num_classes = len(classes)

    for gan_type in ["StandardGAN", "cGAN", "WGAN"]:
        all_generated_data = []

        for cls in classes:
            # NOTE: each generator is used with freshly initialized weights;
            # no adversarial training is performed anywhere in this script.
            if gan_type == "StandardGAN":
                generator = create_standard_gan_generator(latent_dim, num_features)
                generated_samples = generate_samples(generator, samples_per_class, latent_dim, gan_type)

            elif gan_type == "cGAN":
                generator = create_cgan_generator(latent_dim, num_features, num_classes)
                generated_samples = generate_samples(generator, samples_per_class, latent_dim, gan_type, num_classes, cls)

            elif gan_type == "WGAN":
                generator = create_wgan_generator(latent_dim, num_features)
                generated_samples = generate_samples(generator, samples_per_class, latent_dim, gan_type)

            generated_label = np.full((samples_per_class, 1), cls)
            generated_data = np.hstack((generated_samples, generated_label))
            all_generated_data.append(generated_data)

        all_generated_data = np.vstack(all_generated_data)
        df_generated = pd.DataFrame(all_generated_data, columns=[*data.columns[:-1], 'label'])

        filename = os.path.join(output_dir, f"{base_name}_{gan_type}.csv")
        df_generated.to_csv(filename, index=False)
        print(f"Data for {gan_type} generated and saved successfully as:", filename)

# Define the input and output directories and the latent dimension for all generators
input_dir = './Data'
output_dir = './GAN'
latent_dim = 134

# Loop through each file in the input directory
for filename in os.listdir(input_dir):
    if filename.endswith('_selected_data.csv'):  # Process only files with the specific suffix
        file_path = os.path.join(input_dir, filename)

        # Extract the base file name (remove "_selected_data.csv")
        base_name = filename.replace('_selected_data.csv', '')

        # Read the data from the CSV file (use a local name so the global `data` is not shadowed)
        selected_data = pd.read_csv(file_path)

        # Generate data with GANs for each file
        print(f"Processing file: {base_name}")
        generate_data_with_gans(selected_data, output_dir, base_name, latent_dim=latent_dim)
        print(f"Finished processing file: {base_name}\n")

end_time = datetime.now()
print('Duration: {}'.format(end_time - start_time))