diff --git a/docs/docs/how_to/assign.ipynb b/docs/docs/how_to/assign.ipynb
index b231b0fb781..94efd41e532 100644
--- a/docs/docs/how_to/assign.ipynb
+++ b/docs/docs/how_to/assign.ipynb
@@ -157,7 +157,7 @@
     "\n",
     "## Next steps\n",
     "\n",
-    "Now you've learned how to pass data through your chains to help to help format the data flowing through your chains.\n",
+    "Now you've learned how to pass data through your chains to help format the data flowing through your chains.\n",
     "\n",
     "To learn more, see the other how-to guides on runnables in this section."
    ]
diff --git a/docs/docs/integrations/chat/fireworks.ipynb b/docs/docs/integrations/chat/fireworks.ipynb
index d3ddfccd1e3..1924151910a 100644
--- a/docs/docs/integrations/chat/fireworks.ipynb
+++ b/docs/docs/integrations/chat/fireworks.ipynb
@@ -17,7 +17,7 @@
    "source": [
     "# ChatFireworks\n",
     "\n",
-    "This doc help you get started with Fireworks AI [chat models](/docs/concepts/chat_models). For detailed documentation of all ChatFireworks features and configurations head to the [API reference](https://python.langchain.com/api_reference/fireworks/chat_models/langchain_fireworks.chat_models.ChatFireworks.html).\n",
+    "This doc helps you get started with Fireworks AI [chat models](/docs/concepts/chat_models). For detailed documentation of all ChatFireworks features and configurations head to the [API reference](https://python.langchain.com/api_reference/fireworks/chat_models/langchain_fireworks.chat_models.ChatFireworks.html).\n",
     "\n",
     "Fireworks AI is an AI inference platform to run and customize models. For a list of all models served by Fireworks see the [Fireworks docs](https://fireworks.ai/models).\n",
     "\n",
@@ -39,7 +39,7 @@
     "\n",
     "### Credentials\n",
     "\n",
-    "Head to (ttps://fireworks.ai/login to sign up to Fireworks and generate an API key. Once you've done this set the FIREWORKS_API_KEY environment variable:"
+    "Head to https://fireworks.ai/login to sign up to Fireworks and generate an API key. Once you've done this set the FIREWORKS_API_KEY environment variable:"
    ]
   },
   {
diff --git a/docs/docs/integrations/chat/writer.ipynb b/docs/docs/integrations/chat/writer.ipynb
index bb71be8eb86..bb43b4b332a 100644
--- a/docs/docs/integrations/chat/writer.ipynb
+++ b/docs/docs/integrations/chat/writer.ipynb
@@ -303,7 +303,7 @@
    "source": [
     "### A note on tool binding\n",
     "\n",
-    "The `ChatWriter.bind_tools()` method does not create new instance with bound tools, but stores the received `tools` and `tool_choice` in the initial class instance attributes to pass them as parameters during the Palmyra LLM call while using `ChatWriter` invocation. This approach allows the support of different tool types, e.g. `function` and `graph`. `Graph` is one of the remotely called Writer Palmyra tools. For further information visit our [docs](https://dev.writer.com/api-guides/knowledge-graph#knowledge-graph). \n",
+    "The `ChatWriter.bind_tools()` method does not create a new instance with bound tools, but stores the received `tools` and `tool_choice` in the initial class instance attributes to pass them as parameters during the Palmyra LLM call while using `ChatWriter` invocation. This approach allows the support of different tool types, e.g. `function` and `graph`. `Graph` is one of the remotely called Writer Palmyra tools. For further information, visit our [docs](https://dev.writer.com/api-guides/knowledge-graph#knowledge-graph). \n",
     "\n",
     "For more information about tool usage in LangChain, visit the [LangChain tool calling documentation](https://python.langchain.com/docs/concepts/tool_calling/)."
    ]
@@ -373,7 +373,7 @@
    "source": [
     "## Prompt templates\n",
     "\n",
-    "[Prompt templates](https://python.langchain.com/docs/concepts/prompt_templates/) help to translate user input and parameters into instructions for a language model. You can use `ChatWriter` with a prompt templates like so:\n"
+    "[Prompt templates](https://python.langchain.com/docs/concepts/prompt_templates/) help to translate user input and parameters into instructions for a language model. You can use `ChatWriter` with a prompt template like so:\n"
    ]
   },
   {
@@ -411,7 +411,7 @@
    "metadata": {},
    "source": [
     "## API reference\n",
-    "For detailed documentation of all ChatWriter features and configurations head to the [API reference](https://python.langchain.com/api_reference/writer/chat_models/langchain_writer.chat_models.ChatWriter.html#langchain_writer.chat_models.ChatWriter).\n",
+    "For detailed documentation of all ChatWriter features and configurations, head to the [API reference](https://python.langchain.com/api_reference/writer/chat_models/langchain_writer.chat_models.ChatWriter.html#langchain_writer.chat_models.ChatWriter).\n",
     "\n",
     "## Additional resources\n",
     "You can find information about Writer's models (including costs, context windows, and supported input types) and tools in the [Writer docs](https://dev.writer.com/home)."
diff --git a/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py b/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py
index df63c1f3ddb..cceed0e1e5a 100644
--- a/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py
+++ b/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py
@@ -775,12 +775,12 @@ class ChatHuggingFace(BaseChatModel):
         elif _is_huggingface_pipeline(self.llm):
             from transformers import AutoTokenizer  # type: ignore[import]
 
+            self.model_id = self.model_id or self.llm.model_id
             self.tokenizer = (
                 AutoTokenizer.from_pretrained(self.model_id)
                 if self.tokenizer is None
                 else self.tokenizer
             )
-            self.model_id = self.llm.model_id
             return
         elif _is_huggingface_endpoint(self.llm):
             self.model_id = self.llm.repo_id or self.llm.model