Type Error

TypeError                                 Traceback (most recent call last)
Cell In[17], line 10
      7     break
      9 #processed_query = embeddings_model.encode([query])
---> 10 result = qa({"question":query,"chat_history":chat_history})
     12 pattern = r'Helpful Answer:.*'
     13 match = re.search(pattern, result['answer'], re.DOTALL)

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\chains\base.py:310, in Chain.__call__(self, inputs, return_only_outputs, callbacks, tags, metadata, run_name, include_run_info)
    308 except BaseException as e:
    309     run_manager.on_chain_error(e)
--> 310     raise e
    311 run_manager.on_chain_end(outputs)
    312 final_outputs: Dict[str, Any] = self.prep_outputs(
    313     inputs, outputs, return_only_outputs
    314 )

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\chains\base.py:304, in Chain.__call__(self, inputs, return_only_outputs, callbacks, tags, metadata, run_name, include_run_info)
    297 run_manager = callback_manager.on_chain_start(
    298     dumpd(self),
    299     inputs,
    300     name=run_name,
    301 )
    302 try:
    303     outputs = (
--> 304         self._call(inputs, run_manager=run_manager)
    305         if new_arg_supported
    306         else self._call(inputs)
    307     )
    308 except BaseException as e:
    309     run_manager.on_chain_error(e)

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\chains\conversational_retrieval\base.py:148, in BaseConversationalRetrievalChain._call(self, inputs, run_manager)
    144 accepts_run_manager = (
    145     "run_manager" in inspect.signature(self._get_docs).parameters
    146 )
    147 if accepts_run_manager:
--> 148     docs = self._get_docs(new_question, inputs, run_manager=_run_manager)
    149 else:
    150     docs = self._get_docs(new_question, inputs)  # type: ignore[call-arg]

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\chains\conversational_retrieval\base.py:305, in ConversationalRetrievalChain._get_docs(self, question, inputs, run_manager)
    297 def _get_docs(
    298     self,
    299     question: str,
    (...)
    302     run_manager: CallbackManagerForChainRun,
    303 ) -> List[Document]:
    304     """Get docs."""
--> 305     docs = self.retriever.get_relevant_documents(
    306         question, callbacks=run_manager.get_child()
    307     )
    308     return self._reduce_tokens_below_limit(docs)

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\schema\retriever.py:211, in BaseRetriever.get_relevant_documents(self, query, callbacks, tags, metadata, run_name, **kwargs)
    209 except Exception as e:
    210     run_manager.on_retriever_error(e)
--> 211     raise e
    212 else:
    213     run_manager.on_retriever_end(
    214         result,
    215         **kwargs,
    216     )

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\schema\retriever.py:204, in BaseRetriever.get_relevant_documents(self, query, callbacks, tags, metadata, run_name, **kwargs)
    202 _kwargs = kwargs if self._expects_other_args else {}
    203 if self._new_arg_supported:
--> 204     result = self._get_relevant_documents(
    205         query, run_manager=run_manager, **_kwargs
    206     )
    207 else:
    208     result = self._get_relevant_documents(query, **_kwargs)

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\schema\vectorstore.py:656, in VectorStoreRetriever._get_relevant_documents(self, query, run_manager)
    652 def _get_relevant_documents(
    653     self, query: str, *, run_manager: CallbackManagerForRetrieverRun
    654 ) -> List[Document]:
    655     if self.search_type == "similarity":
--> 656         docs = self.vectorstore.similarity_search(query, **self.search_kwargs)
    657     elif self.search_type == "similarity_score_threshold":
    658         docs_and_similarities = (
    659             self.vectorstore.similarity_search_with_relevance_scores(
    660                 query, **self.search_kwargs
    661             )
    662         )

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\vectorstores\faiss.py:509, in FAISS.similarity_search(self, query, k, filter, fetch_k, **kwargs)
    489 def similarity_search(
    490     self,
    491     query: str,
    (...)
    495     **kwargs: Any,
    496 ) -> List[Document]:
    497     """Return docs most similar to query.
    498
    499     Args:
    (...)
    507         List of Documents most similar to the query.
    508     """
--> 509     docs_and_scores = self.similarity_search_with_score(
    510         query, k, filter=filter, fetch_k=fetch_k, **kwargs
    511     )
    512     return [doc for doc, _ in docs_and_scores]

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\vectorstores\faiss.py:390, in FAISS.similarity_search_with_score(self, query, k, filter, fetch_k, **kwargs)
    369 def similarity_search_with_score(
    370     self,
    371     query: str,
    (...)
    375     **kwargs: Any,
    376 ) -> List[Tuple[Document, float]]:
    377     """Return docs most similar to query.
    378
    379     Args:
    (...)
    388         L2 distance in float. Lower score represents more similarity.
    389     """
--> 390     embedding = self._embed_query(query)
    391     docs = self.similarity_search_with_score_by_vector(
    392         embedding,
    393         k,
    (...)
    396         **kwargs,
    397     )
    398     return docs

File D:\anaconda3\envs\gpt\Lib\site-packages\langchain\vectorstores\faiss.py:155, in FAISS._embed_query(self, text)
    153     return self.embedding_function.embed_query(text)
    154 else:
--> 155     return self.embedding_function(text)

File D:\anaconda3\envs\gpt\Lib\site-packages\torch\nn\modules\module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs)
   1516     return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1517 else:
-> 1518     return self._call_impl(*args, **kwargs)

File D:\anaconda3\envs\gpt\Lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs)
   1522 # If we don't have any hooks, we want to skip the rest of the logic in
   1523 # this function, and just call forward.
   1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1525         or _global_backward_pre_hooks or _global_backward_hooks
   1526         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1527     return forward_call(*args, **kwargs)
   1529 try:
   1530     result = None

File D:\anaconda3\envs\gpt\Lib\site-packages\torch\nn\modules\container.py:215, in Sequential.forward(self, input)
    213 def forward(self, input):
    214     for module in self:
--> 215         input = module(input)
    216     return input

File D:\anaconda3\envs\gpt\Lib\site-packages\torch\nn\modules\module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs)
   1516     return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1517 else:
-> 1518     return self._call_impl(*args, **kwargs)

File D:\anaconda3\envs\gpt\Lib\site-packages\torch\nn\modules\module.py:1527, in Module._call_impl(self, *args, **kwargs)
   1522 # If we don't have any hooks, we want to skip the rest of the logic in
   1523 # this function, and just call forward.
   1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1525         or _global_backward_pre_hooks or _global_backward_hooks
   1526         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1527     return forward_call(*args, **kwargs)
   1529 try:
   1530     result = None

File D:\anaconda3\envs\gpt\Lib\site-packages\sentence_transformers\models\Transformer.py:62, in Transformer.forward(self, features)
     60 def forward(self, features):
     61     """Returns token_embeddings, cls_token"""
---> 62     trans_features = {'input_ids': features['input_ids'], 'attention_mask': features['attention_mask']}
     63     if 'token_type_ids' in features:
     64         trans_features['token_type_ids'] = features['token_type_ids']

TypeError: string indices must be integers, not 'str'
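
The last frames show where this goes wrong: FAISS._embed_query takes the bare-callable branch (`return self.embedding_function(text)`), so the query string is passed straight into a torch Sequential (the raw sentence-transformers model), and Transformer.forward then indexes that string as if it were a feature dict (`features['input_ids']`), raising the TypeError. A likely cause is that the FAISS store was built with the SentenceTransformer model object itself as the embedding function rather than a LangChain Embeddings wrapper. Below is a minimal sketch of that fix under this assumption; the model name, texts, and chain setup are placeholders, not values from the original notebook.

# Sketch of a likely fix (assumption: the index was built with a raw
# SentenceTransformer model as the embedding function; model name and texts
# below are placeholders).
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS

# Wrap the sentence-transformers model in a LangChain Embeddings object so
# FAISS._embed_query takes the embed_query() branch instead of calling the
# torch module directly with a string.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Rebuild the vector store with the wrapper (or reload a saved index with it).
texts = ["example document one", "example document two"]  # placeholder corpus
vectorstore = FAISS.from_texts(texts, embeddings)
# vectorstore = FAISS.load_local("faiss_index", embeddings)  # if an index was saved earlier

retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
# qa = ConversationalRetrievalChain.from_llm(llm, retriever=retriever)
# result = qa({"question": query, "chat_history": chat_history})

If the index was previously persisted with FAISS.save_local, reloading it with the wrapped embeddings object (the commented load_local line) should be enough; the key point is that embedding_function ends up being an Embeddings instance, not the torch model.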