Mirror of https://github.com/nomic-ai/gpt4all.git (synced 2025-10-31 13:51:43 +00:00)
	Python chat streaming (#1127)
* Support streaming in chat session
* Uncommented tests
@@ -210,9 +210,6 @@ class GPT4All:
         if n_predict is not None:
             generate_kwargs['n_predict'] = n_predict
 
-        if streaming and self._is_chat_session_activated:
-            raise NotImplementedError("Streaming tokens in a chat session is not currently supported.")
-
         if self._is_chat_session_activated:
             self.current_chat_session.append({"role": "user", "content": prompt})
             generate_kwargs['prompt'] = self._format_chat_prompt_template(messages=self.current_chat_session)
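With this guard removed, generate() no longer raises NotImplementedError when streaming=True is passed while a chat session is active; the call falls through to the normal chat-session prompt formatting. A minimal sketch of the resulting call pattern (the model filename is a placeholder, and the token-iterator behaviour is assumed from the updated test below):

```python
from gpt4all import GPT4All

# Placeholder model file; any locally available GPT4All model would do.
model = GPT4All("ggml-model.bin")

with model.chat_session():
    # With the guard removed, streaming works inside an active chat session:
    # generate() yields tokens instead of raising NotImplementedError.
    for token in model.generate(prompt="hello", top_k=1, streaming=True):
        print(token, end="", flush=True)
```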
@@ -25,11 +25,13 @@ def test_inference():
     assert len(tokens) > 0
 
     with model.chat_session():
-        try:
-            response = model.generate(prompt='hello', top_k=1, streaming=True)
-            assert False
-        except NotImplementedError:
-            assert True
+        tokens = list(model.generate(prompt='hello', top_k=1, streaming=True))
+        model.current_chat_session.append({'role': 'assistant', 'content': ''.join(tokens)})
+
+        tokens = list(model.generate(prompt='write me a poem about dogs', top_k=1, streaming=True))
+        model.current_chat_session.append({'role': 'assistant', 'content': ''.join(tokens)})
+
+        print(model.current_chat_session)
 
 
 def do_long_input(model):
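The updated test also suggests that, when streaming inside a chat session, the caller appends the assistant turn to current_chat_session after consuming the stream so that later prompts are formatted with the full conversation history. A multi-turn sketch along those lines (the model filename is a placeholder):

```python
from gpt4all import GPT4All

# Placeholder model file; substitute a model you have downloaded.
model = GPT4All("ggml-model.bin")

with model.chat_session():
    for prompt in ("hello", "write me a poem about dogs"):
        # Stream the reply token by token.
        tokens = []
        for token in model.generate(prompt=prompt, top_k=1, streaming=True):
            tokens.append(token)
            print(token, end="", flush=True)
        print()

        # Mirroring the test: the caller appends the assistant turn so the
        # next prompt is formatted with the full conversation history.
        model.current_chat_session.append(
            {"role": "assistant", "content": "".join(tokens)}
        )
```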