 
 
 class NLELanguageWrapper(language_wrapper.NLELanguageWrapper):
-    def __init__(self, env, vlm=False, skip_more=False):
+    def __init__(self, env, vlm=False):
         super().__init__(env, use_language_action=True)
         self.nle_language = nle_language_obsv.NLELanguageObsv()
         self.language_action_space = self.create_action_space()
         self.env = env
         self.vlm = vlm
         self.done = False
-        self.skip_more = skip_more
 
         if not vlm:
             self.prompt_mode = "hybrid"
@@ -77,19 +76,6 @@ def nle_obsv_type(self, nle_obsv):
         else:
             raise ValueError(f'"{self.prompt_mode}" is not a valid prompt mode.')
 
-    def clean_message(self, nle_obsv):
-        message = self.nle_language.text_message(nle_obsv["tty_chars"]).decode("latin-1")
-        if not self.skip_more:
-            while "--More--" in message and not self.done:
-                message = message.replace("--More--", " ")
-                message = message.replace("\n", " ")
-
-                nle_obsv, reward, self.done, info = self.step("more")
-                add = self.nle_language.text_message(nle_obsv["obs"]["tty_chars"]).decode("latin-1")
-                message += add
-            return message, nle_obsv["obs"]
-        return message, nle_obsv
-
     def render(self, mode="human"):
         if mode == "tiles":
             obs = self.env.last_observation
@@ -150,7 +136,11 @@ def nle_obsv_to_language(self, nle_obsv):
             (dict): language observation
         """
 
-        message, nle_obsv = self.clean_message(nle_obsv)
+        message = (
+            nle_obsv["text_message"]
+            if "text_message" in nle_obsv
+            else self.nle_language.text_message(nle_obsv["tty_chars"]).decode("latin-1")
+        )
 
         glyphs = nle_obsv["glyphs"]
         blstats = nle_obsv["blstats"]
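
For context, the change drops the wrapper's in-place "--More--" handling (the removed clean_message, which stepped the environment with "more" to flatten continuation prompts) and instead reads the message directly from the observation. The standalone sketch below restates that new lookup: extract_message is a hypothetical helper, and nle_language is assumed to be the NLELanguageObsv instance built in __init__ above; the dict check and the latin-1 fallback decode mirror the added lines.

# Minimal sketch of the message lookup introduced in nle_obsv_to_language.
# extract_message is illustrative only; nle_language is assumed to be an
# NLELanguageObsv instance, as constructed in the wrapper's __init__.
def extract_message(nle_obsv, nle_language):
    # Prefer a pre-computed "text_message" when the observation provides one.
    if "text_message" in nle_obsv:
        return nle_obsv["text_message"]
    # Otherwise decode the raw terminal characters, as the wrapper now does.
    return nle_language.text_message(nle_obsv["tty_chars"]).decode("latin-1")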
|