Commit 4f42230

Merge pull request #241 from gkumbhat/fix_run_sequencing
🚑🔧 Fix sequencing of arguments on run function
2 parents 2141813 + ae7ae5c

4 files changed (+6 lines, -6 lines)
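
The change is small but applied consistently across the three run entry points: the seed keyword parameter moves from the middle of the signature (right after temperature) to the end of the keyword parameters (after stop_sequences), and the corresponding docstring entry in model_run_utils.py is moved to match. The sketch below uses hypothetical function names and a trimmed parameter list, not the caikit_nlp code; only the relative position of seed mirrors the commit. It illustrates why the position of a defaulted keyword parameter still matters: callers that pass arguments positionally bind by position, so moving a parameter changes what those calls mean.

    # Minimal sketch of the reordering, with made-up names and a trimmed
    # parameter list; only the relative position of `seed` mirrors the commit.
    from typing import Optional

    def run_before(text: str,
                   temperature: Optional[float] = None,
                   seed: Optional[int] = None,
                   repetition_penalty: Optional[float] = None):
        return {"temperature": temperature, "seed": seed,
                "repetition_penalty": repetition_penalty}

    def run_after(text: str,
                  temperature: Optional[float] = None,
                  repetition_penalty: Optional[float] = None,
                  seed: Optional[int] = None):
        return {"temperature": temperature, "seed": seed,
                "repetition_penalty": repetition_penalty}

    # A positional call binds by position, not by name:
    print(run_before("hi", 0.7, 42))  # temperature=0.7, seed=42
    print(run_after("hi", 0.7, 42))   # temperature=0.7, repetition_penalty=42, seed=None

Callers that pass seed by keyword are unaffected by the move; only positional calls reaching past temperature would see a difference.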

caikit_nlp/modules/text_generation/peft_prompt_tuning.py

Lines changed: 1 addition & 1 deletion
@@ -162,13 +162,13 @@ def run(
         top_p: Optional[float] = None,
         typical_p: Optional[float] = None,
         temperature: Optional[float] = None,
-        seed: Optional[np.uint64] = None,
         repetition_penalty: Optional[float] = None,
         max_time: Optional[float] = None,
         exponential_decay_length_penalty: Optional[
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        seed: Optional[np.uint64] = None,
     ) -> GeneratedTextResult:
         f"""
         Run the full text generation model.

caikit_nlp/modules/text_generation/peft_tgis_remote.py

Lines changed: 1 addition & 1 deletion
@@ -188,13 +188,13 @@ def run(
         top_p: Optional[float] = None,
         typical_p: Optional[float] = None,
         temperature: Optional[float] = None,
-        seed: Optional[np.uint64] = None,
         repetition_penalty: Optional[float] = None,
         max_time: Optional[float] = None,
         exponential_decay_length_penalty: Optional[
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        seed: Optional[np.uint64] = None,
         preserve_input_text: bool = False,
     ) -> GeneratedTextResult:
         f"""Run inference against the model running in TGIS.

caikit_nlp/modules/text_generation/text_generation_tgis.py

Lines changed: 1 addition & 1 deletion
@@ -214,13 +214,13 @@ def run(
         top_p: Optional[float] = None,
         typical_p: Optional[float] = None,
         temperature: Optional[float] = None,
-        seed: Optional[np.uint64] = None,
         repetition_penalty: Optional[float] = None,
         max_time: Optional[float] = None,
         exponential_decay_length_penalty: Optional[
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        seed: Optional[np.uint64] = None,
         preserve_input_text: bool = False,
     ) -> GeneratedTextResult:
         f"""Run inference against the model running in TGIS.

caikit_nlp/toolkit/text_generation/model_run_utils.py

Lines changed: 3 additions & 3 deletions
@@ -81,9 +81,6 @@
         The value used to modulate the next token probabilities.
         Only applicable when decoding_method is SAMPLING.
         Default: 1.0 - means disabled - equivalent to 1.0
-    seed: numpy.uint64
-        Random seed to control sampling. Only applicable when decoding_method
-        is SAMPLING. Default: None
     repetition_penalty: float
         The more a token is used within generation the more it is penalized
         to not be picked in successive generation passes.
@@ -100,6 +97,9 @@
         of exponential decay
     stop_sequences: List[str]
         List of strings to be used as stopping criteria
+    seed: numpy.uint64
+        Random seed to control sampling. Only applicable when decoding_method
+        is SAMPLING. Default: None
 """