I am having an issue importing a custom Gym environment into RLlib. As mentioned in the documentation, there is a warning that gym env registration is not always compatible with Ray, so we can pass the environment class directly instead. But my custom env's constructor takes more than one required argument, and although I pass those arguments in `env_config`, I get an error that 2 positional arguments are missing; none of the arguments I supplied are being picked up.

I have also tried feeding an instance of my custom env, but RLlib does not accept an instance of the class (first sketch at the end of this post).

I also tried wrapping the construction in an `env_creator` function, but that did not work either (second sketch at the end of this post).

Here is my env's constructor signature:
```python
class IntradayTradingEnv(gym.Env):
    def __init__(
        self,
        df: pd.DataFrame,
        num_stock_shares: list[int],
        tech_indicator_list: list[str],
        day=0,
        initial=True,
        previous_state=[],
        model_name="",
        mode="",
        iteration="",
        n_days=1,
        frame_stack=False,
        n_stack=5,
        reward_fun=1,
        use_multiCore=False,
        cwd="./../Archive/Active_Directory",
        reward_P=2,
        reward_L=2,
        closing_bias=-1,
        closing_period=4,
    ) -> None:
        ...
```
And here is how I set up PPO:

```python
import gymnasium as gym
import ray
from ray.rllib.algorithms import ppo
from application.PSW import config  # provides config.INDICATORS

ray.init(ignore_reinit_error=True)

env = IntradayTradingEnv  # passing the class itself, as the docs suggest
# Note: this reassignment shadows the imported `config` module;
# config.INDICATORS on the right-hand side is evaluated first, so it still resolves.
config = ppo.PPOConfig().environment(
    env=env,
    env_config={
        "df": df,  # my market-data DataFrame, loaded earlier
        "num_stock_shares": [0],
        "tech_indicator_list": config.INDICATORS,
    },
)
algo = config.build()
for _ in range(3):
    print(algo.train())
algo.stop()
```

This fails with the following traceback:
```
TypeError                                 Traceback (most recent call last)
Cell In[4], line 36
      8 # def env_creator(env_config):
      9 #     return IntradayTradingEnv(
     10 #         df=env_config["df"],
   (...)
     28 #     enable_rl_module_and_learner=True,
     29 #     enable_env_runner_and_connector_v2=True,
     33 config = ppo.PPOConfig().environment(env=env, env_config={"df": df,
...
    758     str(env.__class__.__base__) == "<class 'gym.core.Env'>"
    759     or str(env.__class__.__base__) == "<class 'gym.core.Wrapper'>"
    760 ):

TypeError: IntradayTradingEnv.__init__() missing 2 required positional arguments: 'num_stock_shares' and 'tech_indicator_list' was raised from the environment creator for rllib-single-agent-env-v0 with kwargs ({})
```
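
For reference, the instance attempt looked roughly like this (a rough reconstruction; `df` and the indicator list are the same objects used above):

```python
# Rough reconstruction of the "pass an instance" attempt.
env_instance = IntradayTradingEnv(
    df=df,
    num_stock_shares=[0],
    tech_indicator_list=config.INDICATORS,  # config here is application.PSW.config
)
# RLlib complained at this point about receiving an env instance
# rather than a class or a registered env name.
ppo_config = ppo.PPOConfig().environment(env=env_instance)
```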
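
And the `env_creator` attempt (the commented-out lines visible in the traceback) was roughly this; the registered name `"intraday-trading-v0"` is just a placeholder I chose:

```python
from ray.tune.registry import register_env

def env_creator(env_config):
    # Unpack the env_config dict into my constructor's keyword arguments.
    return IntradayTradingEnv(
        df=env_config["df"],
        num_stock_shares=env_config["num_stock_shares"],
        tech_indicator_list=env_config["tech_indicator_list"],
    )

register_env("intraday-trading-v0", env_creator)

ppo_config = ppo.PPOConfig().environment(
    env="intraday-trading-v0",  # refer to the registered name
    env_config={
        "df": df,
        "num_stock_shares": [0],
        "tech_indicator_list": config.INDICATORS,  # application.PSW.config
    },
)
```

What is the correct way to pass multiple constructor arguments to a custom env in RLlib?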