mirror of
https://github.com/hwchase17/langchain.git
synced 2025-07-13 08:27:03 +00:00
Remove deprecated param and flexibility for prompt (#13310)
- **Description:** Updated to remove the deprecated parameter `penalty_alpha`, and to use a string variation of the prompt rather than a JSON object, for better flexibility. - **Issue:** N/A - **Dependencies:** N/A - **Tag maintainer:** @eyurtsev - **Twitter handle:** @symbldotai --------- Co-authored-by: toshishjawale <toshish@symbl.ai> Co-authored-by: Harrison Chase <hw.chase.17@gmail.com>
This commit is contained in:
parent
3eb391561b
commit
6f64cb5078
@ -57,8 +57,7 @@ class Nebula(LLM):
|
||||
temperature: Optional[float] = 0.6
|
||||
top_p: Optional[float] = 0.95
|
||||
repetition_penalty: Optional[float] = 1.0
|
||||
top_k: Optional[int] = 0
|
||||
penalty_alpha: Optional[float] = 0.0
|
||||
top_k: Optional[int] = 1
|
||||
stop_sequences: Optional[List[str]] = None
|
||||
max_retries: Optional[int] = 10
|
||||
|
||||
@ -106,7 +105,6 @@ class Nebula(LLM):
|
||||
"top_k": self.top_k,
|
||||
"top_p": self.top_p,
|
||||
"repetition_penalty": self.repetition_penalty,
|
||||
"penalty_alpha": self.penalty_alpha,
|
||||
}
|
||||
|
||||
@property
|
||||
@ -162,16 +160,10 @@ class Nebula(LLM):
|
||||
"""
|
||||
params = self._invocation_params(stop, **kwargs)
|
||||
prompt = prompt.strip()
|
||||
if "\n" in prompt:
|
||||
instruction = prompt.split("\n")[0]
|
||||
conversation = "\n".join(prompt.split("\n")[1:])
|
||||
else:
|
||||
raise ValueError("Prompt must contain instruction and conversation.")
|
||||
|
||||
response = completion_with_retry(
|
||||
self,
|
||||
instruction=instruction,
|
||||
conversation=conversation,
|
||||
prompt=prompt,
|
||||
params=params,
|
||||
url=f"{self.nebula_service_url}{self.nebula_service_path}",
|
||||
)
|
||||
@ -181,8 +173,7 @@ class Nebula(LLM):
|
||||
|
||||
def make_request(
|
||||
self: Nebula,
|
||||
instruction: str,
|
||||
conversation: str,
|
||||
prompt: str,
|
||||
url: str = f"{DEFAULT_NEBULA_SERVICE_URL}{DEFAULT_NEBULA_SERVICE_PATH}",
|
||||
params: Optional[Dict] = None,
|
||||
) -> Any:
|
||||
@ -196,12 +187,7 @@ def make_request(
|
||||
"ApiKey": f"{api_key}",
|
||||
}
|
||||
|
||||
body = {
|
||||
"prompt": {
|
||||
"instruction": instruction,
|
||||
"conversation": {"text": f"{conversation}"},
|
||||
}
|
||||
}
|
||||
body = {"prompt": prompt}
|
||||
|
||||
# add params to body
|
||||
for key, value in params.items():
|
||||
|
@ -41,8 +41,7 @@ Rhea: Thanks, bye!"""
|
||||
|
||||
instruction = """Identify the main objectives mentioned in this
|
||||
conversation."""
|
||||
prompt = PromptTemplate.from_template("{instruction}\n{conversation}")
|
||||
|
||||
prompt = PromptTemplate.from_template(template="{instruction}\n{conversation}")
|
||||
llm_chain = LLMChain(prompt=prompt, llm=llm)
|
||||
output = llm_chain.run(instruction=instruction, conversation=conversation)
|
||||
assert isinstance(output, str)
|
||||
|
Loading…
Reference in New Issue
Block a user