BlockedPromptException: block_reason: OTHER
dhruv21995 commented
2024-07-03 13:16:24.042 Uncaught app exception
Traceback (most recent call last):
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 600, in _run_script
exec(code, module.__dict__)
File "C:\Users\BT428FR\gemini_multipdf_chat\app.py", line 169, in <module>
main()
File "C:\Users\BT428FR\gemini_multipdf_chat\app.py", line 154, in main
response = user_input(prompt)
^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\gemini_multipdf_chat\app.py", line 98, in user_input
response = chain.invoke(
^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\base.py", line 166, in invoke
raise e
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\base.py", line 156, in invoke
self._call(inputs, run_manager=run_manager)
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\combine_documents\base.py", line 137, in _call
output, extra_return_dict = self.combine_docs(
^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\combine_documents\stuff.py", line 244, in combine_docs
return self.llm_chain.predict(callbacks=callbacks, **inputs), {}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\llm.py", line 316, in predict
return self(kwargs, callbacks=callbacks)[self.output_key]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_core\_api\deprecation.py", line 148, in warning_emitting_wrapper
return wrapped(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\base.py", line 383, in __call__
return self.invoke(
^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\base.py", line 166, in invoke
raise e
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\base.py", line 156, in invoke
self._call(inputs, run_manager=run_manager)
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\llm.py", line 126, in _call
response = self.generate([inputs], run_manager=run_manager)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain\chains\llm.py", line 138, in generate
return self.llm.generate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_core\language_models\chat_models.py", line 599, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_core\language_models\chat_models.py", line 456, in generate
raise e
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_core\language_models\chat_models.py", line 446, in generate
self._generate_with_cache(
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_core\language_models\chat_models.py", line 671, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_google_genai\chat_models.py", line 666, in _generate
response: genai.types.GenerateContentResponse = _chat_with_retry(
^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_google_genai\chat_models.py", line 171, in _chat_with_retry
return _chat_with_retry(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\tenacity\__init__.py", line 289, in wrapped_f
return self(f, *args, **kw)
^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\tenacity\__init__.py", line 379, in __call__
do = self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\tenacity\__init__.py", line 314, in iter
return fut.result()
^^^^^^^^^^^^
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.12_3.12.1008.0_x64__qbz5n2kfra8p0\Lib\concurrent\futures\_base.py",
line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.12_3.12.1008.0_x64__qbz5n2kfra8p0\Lib\concurrent\futures\_base.py",
line 401, in __get_result
raise self._exception
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\tenacity\__init__.py", line 382, in __call__
result = fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_google_genai\chat_models.py", line 169, in _chat_with_retry
raise e
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\langchain_google_genai\chat_models.py", line 153, in _chat_with_retry
return generation_method(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\google\generativeai\generative_models.py", line 505, in send_message
self._check_response(response=response, stream=stream)
File "C:\Users\BT428FR\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0\LocalCache\local-packages\Python312\site-packages\google\generativeai\generative_models.py", line 524, in _check_response
raise generation_types.BlockedPromptException(response.prompt_feedback)
google.generativeai.types.generation_types.BlockedPromptException: block_reason: OTHER
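The last frames show where this comes from: in the google-generativeai chat path, send_message calls _check_response, which raises BlockedPromptException whenever response.prompt_feedback carries a block_reason. The same feedback can be inspected without the exception by calling generate_content directly with the raw SDK; the API key and prompt text below are placeholders, not taken from this repo:

```python
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")      # placeholder key
model = genai.GenerativeModel("gemini-pro")  # assumed model name

# generate_content does not raise for a blocked prompt, so the feedback that
# send_message/_check_response turns into BlockedPromptException can be
# printed directly.
response = model.generate_content("text extracted from the failing PDF ...")
print(response.prompt_feedback)  # e.g. "block_reason: OTHER"
```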
dhruv21995 commented
I made some changes to the code and it still works. I replaced the deprecated chain(...) call with chain.invoke, as suggested by the deprecation warning. I then uploaded a couple of PDFs: one worked fine, but another failed with this error. Please help.
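One way to keep this from surfacing as an uncaught Streamlit exception is to catch BlockedPromptException around the chain.invoke call and show an error message instead. This is only a sketch: the function name, the chain and docs arguments, and the input keys assume the usual load_qa_chain pattern rather than the exact code in app.py:

```python
import streamlit as st
from google.generativeai.types.generation_types import BlockedPromptException

def answer_question(chain, docs, question):
    # `chain` is the stuff-documents QA chain and `docs` the retrieved PDF
    # chunks; the input keys follow the usual load_qa_chain pattern.
    try:
        return chain.invoke({"input_documents": docs, "question": question})
    except BlockedPromptException as exc:
        # Gemini refused the prompt (block_reason: OTHER here); show a message
        # instead of letting Streamlit print "Uncaught app exception".
        st.error(f"Gemini blocked this request: {exc}")
        return None
```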
kaifcoder commented
Check your prompt, as it is being blocked by Gemini. The context passed to the LLM may contain something its safety filters consider offensive.
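If the block is coming from the safety filters, explicit safety_settings can be passed when constructing the model; ChatGoogleGenerativeAI accepts this parameter and forwards it to the Gemini SDK. A minimal sketch with every category lowered to BLOCK_NONE; the model name and temperature are assumptions, not taken from this repo:

```python
from google.generativeai.types import HarmBlockThreshold, HarmCategory
from langchain_google_genai import ChatGoogleGenerativeAI

# Hypothetical model setup with every safety category set to BLOCK_NONE.
llm = ChatGoogleGenerativeAI(
    model="gemini-pro",  # assumed model name
    temperature=0.3,     # assumed setting
    safety_settings={
        HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_NONE,
        HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_NONE,
        HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_NONE,
        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,
    },
)
```

Note that these thresholds only control SAFETY blocks; block_reason: OTHER is opaque, so if the error persists the more reliable fix is to rephrase the question or shrink the chunk of PDF text that goes into the prompt.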