PYGAD issue with Parallel Processing

494 Views

I am using PyGAD to optimize parameters for a backtesting strategy. It switches from using multiple threads to a single thread every generation, and I am confused as to why. It was working perfectly fine previously. I changed the mutation_type to 'adaptive' to attempt to improve my fitness; however, it seems to be interfering with the parallel processing. I have also begun running an AWS instance and executing code on it while the genetic algorithm runs locally on my machine, but I do not think that would affect the processes running on my local machine. I am using a MacBook Pro with the M1 Max chip and 10 cores (8 performance, 2 efficiency). If anyone has a clue as to why, or a solution, I would greatly appreciate it. Here is the code:

def fitness_func(ga_instance, solution, solution_idx):
    """PyGAD fitness function: backtest RsiMacdStrategy with the candidate
    gene values and return the final portfolio value as the fitness score.

    Parameters
    ----------
    ga_instance : pygad.GA
        The running GA instance (required by PyGAD's callback signature; unused).
    solution : sequence of numbers
        Gene values, in the order declared by the GA's gene_space.
    solution_idx : int
        Index of this solution within the population (unused).

    Returns
    -------
    float
        Final broker value after the backtest; higher is fitter.
    """
    try:
        # Map the flat gene array onto the strategy's named parameters.
        params = {
            'rsi_periods': int(solution[0]),
            'macd1': int(solution[1]),
            'macd2': int(solution[2]),
            'macdsig': int(solution[3]),
            'rsi_high': int(solution[4]),
            'stop_loss': float(solution[5]),
            'take_profit': float(solution[6]),
            'rsi_low': int(solution[7]),
            'rsi_mid': int(solution[8]),
        }

        # Build a fresh Cerebro per evaluation so every backtest is
        # independent (required when PyGAD fans evaluations out to workers).
        cerebro = bt.Cerebro()
        cerebro.broker.setcommission(commission=0.0075)
        cerebro.addstrategy(RsiMacdStrategy, **params)
        cerebro.broker.setcash(2500.0)
        data = get_data('DOT')
        cerebro.adddata(data)

        # Run the backtest; the final portfolio value is the fitness score.
        results = cerebro.run()
        final_value = results[0].broker.get_value()

        # NOTE(review): with parallel_processing=["process", N] each worker
        # process gets its own copy of this global, so the printed count only
        # reflects evaluations done in the current process, not the total.
        global finished_counter
        finished_counter += 1
        print(f"Finished {finished_counter}")

        return final_value
    except Exception as e:
        print(f"Error in fitness function: {e}")
        # Bare `raise` re-raises with the original traceback intact;
        # `raise e` would re-anchor the traceback at this line.
        raise




def on_generation(ga_instance):
    """PyGAD per-generation callback: log progress and append the best
    solution of the generation to a CSV results file.

    Parameters
    ----------
    ga_instance : pygad.GA
        The running GA instance; queried for the current best solution.

    Raises
    ------
    Exception
        Any error is printed and re-raised so the GA run stops visibly.
    """
    try:
        # Best solution of the population as of this generation.
        best_solution, best_solution_fitness, best_solution_idx = ga_instance.best_solution()

        # Progress line: generation number, fitness, and the gene values.
        print(f"Generation: {ga_instance.generations_completed}, Best fitness: {best_solution_fitness}, Best solution: {best_solution}")

        # Append one row per generation: genes followed by the fitness.
        # newline='' is required by the csv module to avoid blank-line
        # corruption on Windows and to quote embedded newlines correctly.
        with open('/Users/.../Desktop/.../Coding/Sentiment Analysis/code/backTesting/results/dotPYGAD.csv', 'a', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(list(best_solution) + [best_solution_fitness])

    except Exception as e:
        print(f"Error in on_generation function: {e}")
        # Bare `raise` preserves the original traceback (unlike `raise e`).
        raise



def run_backtest():
    """Configure and run the genetic algorithm that tunes RsiMacdStrategy.

    Gene order must match fitness_func: rsi_periods, macd1, macd2, macdsig,
    rsi_high, stop_loss, take_profit, rsi_low, rsi_mid. Prints the best
    solution and its fitness when the run finishes.
    """
    # Per-gene Python type and allowed value range, in gene order.
    gene_types = [int, int, int, int, int, float, float, int, int]
    gene_bounds = [
        {"low": 5, "high": 35},     # rsi_periods
        {"low": 5, "high": 25},     # macd1
        {"low": 10, "high": 45},    # macd2
        {"low": 4, "high": 12},     # macdsig
        {"low": 65, "high": 90},    # rsi_high
        {"low": .04, "high": .1},   # stop_loss
        {"low": .075, "high": .2},  # take_profit
        {"low": 35, "high": 49},    # rsi_low
        {"low": 51, "high": 65},    # rsi_mid
    ]

    # Gather the GA configuration in one place before construction.
    ga_settings = dict(
        num_generations=15,
        num_parents_mating=30,
        fitness_func=fitness_func,
        sol_per_pop=100,
        num_genes=len(gene_types),
        gene_type=gene_types,
        gene_space=gene_bounds,
        parent_selection_type="rank",
        crossover_type="single_point",
        mutation_type="adaptive",
        mutation_probability=[.6, .2],   # [high, low] rates for adaptive mutation
        keep_parents=1,
        parallel_processing=["process", 8],
        suppress_warnings=True,
        keep_elitism=1,
        on_generation=on_generation,     # called after every generation
    )
    ga_instance = pygad.GA(**ga_settings)

    # Evolve, then report the overall best individual.
    ga_instance.run()
    solution, solution_fitness, solution_idx = ga_instance.best_solution()
    print(f"Best solution: {solution}, Fitness: {solution_fitness}")

0

There are 0 best solutions below