Error Connecting - create() function

Posted on: 03 Sept 2024 | Last answered: 03 Sept 2024

5 answers (0 marked as helpful)
Instructor | Posted on: 03 Sept 2024
Hey Rahul,
Thank you for reaching out.
Please proceed with the lessons; in the later notebook files, where we fill in the dictionaries, the code should run smoothly.
Let me know if you experience other issues. 
Kind regards,
365 Hristina
Posted on: 03 Sept 2024
Thanks for the reply. I faced the same issue when executing the "04 The OpenAPI 03" notebook.
Attaching the error log:
File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_transports\default.py:72, in map_httpcore_exceptions()
     71 try:
---> 72     yield
     73 except Exception as exc:

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_transports\default.py:236, in HTTPTransport.handle_request(self, request)
    235 with map_httpcore_exceptions():
--> 236     resp = self._pool.handle_request(req)
    238 assert isinstance(resp.stream, typing.Iterable)

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_sync\connection_pool.py:216, in ConnectionPool.handle_request(self, request)
    215     self._close_connections(closing)
--> 216     raise exc from None
    218 # Return the response. Note that in this case we still have to manage
    219 # the point at which the response is closed.

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_sync\connection_pool.py:196, in ConnectionPool.handle_request(self, request)
    194 try:
    195     # Send the request on the assigned connection.
--> 196     response = connection.handle_request(
    197         pool_request.request
    198     )
    199 except ConnectionNotAvailable:
    200     # In some cases a connection may initially be available to
    201     # handle a request, but then become unavailable.
    202     #
    203     # In this case we clear the connection and try again.

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_sync\connection.py:99, in HTTPConnection.handle_request(self, request)
     98     self._connect_failed = True
---> 99     raise exc
    101 return self._connection.handle_request(request)

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_sync\connection.py:76, in HTTPConnection.handle_request(self, request)
     75 if self._connection is None:
---> 76     stream = self._connect(request)
     78     ssl_object = stream.get_extra_info("ssl_object")

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_sync\connection.py:154, in HTTPConnection._connect(self, request)
    153 with Trace("start_tls", logger, request, kwargs) as trace:
--> 154     stream = stream.start_tls(**kwargs)
    155     trace.return_value = stream

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_backends\sync.py:152, in SyncStream.start_tls(self, ssl_context, server_hostname, timeout)
    148 exc_map: ExceptionMapping = {
    149     socket.timeout: ConnectTimeout,
    150     OSError: ConnectError,
    151 }
--> 152 with map_exceptions(exc_map):
    153     try:

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\contextlib.py:153, in _GeneratorContextManager.__exit__(self, typ, value, traceback)
    152 try:
--> 153     self.gen.throw(typ, value, traceback)
    154 except StopIteration as exc:
    155     # Suppress StopIteration *unless* it's the same exception that
    156     # was passed to throw().  This prevents a StopIteration
    157     # raised inside the "with" statement from being suppressed.

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpcore\_exceptions.py:14, in map_exceptions(map)
     13     if isinstance(exc, from_exc):
---> 14         raise to_exc(exc) from exc
     15 raise

ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)

The above exception was the direct cause of the following exception:

ConnectError                              Traceback (most recent call last)
File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:973, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
    972 try:
--> 973     response = self._client.send(
    974         request,
    975         stream=stream or self._should_stream_response_body(request=request),
    976         **kwargs,
    977     )
    978 except httpx.TimeoutException as err:

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_client.py:926, in Client.send(self, request, stream, auth, follow_redirects)
    924 auth = self._build_request_auth(request, auth)
--> 926 response = self._send_handling_auth(
    927     request,
    928     auth=auth,
    929     follow_redirects=follow_redirects,
    930     history=[],
    931 )
    932 try:

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_client.py:954, in Client._send_handling_auth(self, request, auth, follow_redirects, history)
    953 while True:
--> 954     response = self._send_handling_redirects(
    955         request,
    956         follow_redirects=follow_redirects,
    957         history=history,
    958     )
    959     try:

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_client.py:991, in Client._send_handling_redirects(self, request, follow_redirects, history)
    989     hook(request)
--> 991 response = self._send_single_request(request)
    992 try:

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_client.py:1027, in Client._send_single_request(self, request)
   1026 with request_context(request=request):
-> 1027     response = transport.handle_request(request)
   1029 assert isinstance(response.stream, SyncByteStream)

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_transports\default.py:235, in HTTPTransport.handle_request(self, request)
    223 req = httpcore.Request(
    224     method=request.method,
    225     url=httpcore.URL(
   (...)
    233     extensions=request.extensions,
    234 )
--> 235 with map_httpcore_exceptions():
    236     resp = self._pool.handle_request(req)

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\contextlib.py:153, in _GeneratorContextManager.__exit__(self, typ, value, traceback)
    152 try:
--> 153     self.gen.throw(typ, value, traceback)
    154 except StopIteration as exc:
    155     # Suppress StopIteration *unless* it's the same exception that
    156     # was passed to throw().  This prevents a StopIteration
    157     # raised inside the "with" statement from being suppressed.

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\httpx\_transports\default.py:89, in map_httpcore_exceptions()
     88 message = str(exc)
---> 89 raise mapped_exc(message) from exc

ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:997)

The above exception was the direct cause of the following exception:

APIConnectionError                        Traceback (most recent call last)
Cell In[9], line 1
----> 1 completion = client.chat.completions.create(model = 'gpt-4', 
      2                                             messages = [{'role':'system', 
      3                                                          'content':''' You are Marv, a chatbot that reluctantly 
      4                                                          answers questions with sarcastic responses. '''}, 
      5                                                         {'role':'user', 
      6                                                          'content':''' I've recently adopted a dog. 
      7                                                          Could you suggest some dog names? '''}])

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_utils\_utils.py:274, in required_args.<locals>.inner.<locals>.wrapper(*args, **kwargs)
    272             msg = f"Missing required argument: {quote(missing[0])}"
    273     raise TypeError(msg)
--> 274 return func(*args, **kwargs)

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\resources\chat\completions.py:668, in Completions.create(self, messages, model, frequency_penalty, function_call, functions, logit_bias, logprobs, max_tokens, n, parallel_tool_calls, presence_penalty, response_format, seed, service_tier, stop, stream, stream_options, temperature, tool_choice, tools, top_logprobs, top_p, user, extra_headers, extra_query, extra_body, timeout)
    633 @required_args(["messages", "model"], ["messages", "model", "stream"])
    634 def create(
    635     self,
   (...)
    665     timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    666 ) -> ChatCompletion | Stream[ChatCompletionChunk]:
    667     validate_response_format(response_format)
--> 668     return self._post(
    669         "/chat/completions",
    670         body=maybe_transform(
    671             {
    672                 "messages": messages,
    673                 "model": model,
    674                 "frequency_penalty": frequency_penalty,
    675                 "function_call": function_call,
    676                 "functions": functions,
    677                 "logit_bias": logit_bias,
    678                 "logprobs": logprobs,
    679                 "max_tokens": max_tokens,
    680                 "n": n,
    681                 "parallel_tool_calls": parallel_tool_calls,
    682                 "presence_penalty": presence_penalty,
    683                 "response_format": response_format,
    684                 "seed": seed,
    685                 "service_tier": service_tier,
    686                 "stop": stop,
    687                 "stream": stream,
    688                 "stream_options": stream_options,
    689                 "temperature": temperature,
    690                 "tool_choice": tool_choice,
    691                 "tools": tools,
    692                 "top_logprobs": top_logprobs,
    693                 "top_p": top_p,
    694                 "user": user,
    695             },
    696             completion_create_params.CompletionCreateParams,
    697         ),
    698         options=make_request_options(
    699             extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
    700         ),
    701         cast_to=ChatCompletion,
    702         stream=stream or False,
    703         stream_cls=Stream[ChatCompletionChunk],
    704     )

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:1260, in SyncAPIClient.post(self, path, cast_to, body, options, files, stream, stream_cls)
   1246 def post(
   1247     self,
   1248     path: str,
   (...)
   1255     stream_cls: type[_StreamT] | None = None,
   1256 ) -> ResponseT | _StreamT:
   1257     opts = FinalRequestOptions.construct(
   1258         method="post", url=path, json_data=body, files=to_httpx_files(files), **options
   1259     )
-> 1260     return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:937, in SyncAPIClient.request(self, cast_to, options, remaining_retries, stream, stream_cls)
    928 def request(
    929     self,
    930     cast_to: Type[ResponseT],
   (...)
    935     stream_cls: type[_StreamT] | None = None,
    936 ) -> ResponseT | _StreamT:
--> 937     return self._request(
    938         cast_to=cast_to,
    939         options=options,
    940         stream=stream,
    941         stream_cls=stream_cls,
    942         remaining_retries=remaining_retries,
    943     )

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:997, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
    994 log.debug("Encountered Exception", exc_info=True)
    996 if retries > 0:
--> 997     return self._retry_request(
    998         input_options,
    999         cast_to,
   1000         retries,
   1001         stream=stream,
   1002         stream_cls=stream_cls,
   1003         response_headers=None,
   1004     )
   1006 log.debug("Raising connection error")
   1007 raise APIConnectionError(request=request) from err

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:1075, in SyncAPIClient._retry_request(self, options, cast_to, remaining_retries, response_headers, stream, stream_cls)
   1071 # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a
   1072 # different thread if necessary.
   1073 time.sleep(timeout)
-> 1075 return self._request(
   1076     options=options,
   1077     cast_to=cast_to,
   1078     remaining_retries=remaining,
   1079     stream=stream,
   1080     stream_cls=stream_cls,
   1081 )

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:997, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
    994 log.debug("Encountered Exception", exc_info=True)
    996 if retries > 0:
--> 997     return self._retry_request(
    998         input_options,
    999         cast_to,
   1000         retries,
   1001         stream=stream,
   1002         stream_cls=stream_cls,
   1003         response_headers=None,
   1004     )
   1006 log.debug("Raising connection error")
   1007 raise APIConnectionError(request=request) from err

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:1075, in SyncAPIClient._retry_request(self, options, cast_to, remaining_retries, response_headers, stream, stream_cls)
   1071 # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a
   1072 # different thread if necessary.
   1073 time.sleep(timeout)
-> 1075 return self._request(
   1076     options=options,
   1077     cast_to=cast_to,
   1078     remaining_retries=remaining,
   1079     stream=stream,
   1080     stream_cls=stream_cls,
   1081 )

File ~\AppData\Local\anaconda3\envs\langchain_env\lib\site-packages\openai\_base_client.py:1007, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
    997         return self._retry_request(
    998             input_options,
    999             cast_to,
   (...)
   1003             response_headers=None,
   1004         )
   1006     log.debug("Raising connection error")
-> 1007     raise APIConnectionError(request=request) from err
   1009 log.debug(
   1010     'HTTP Response: %s %s "%i %s" %s',
   1011     request.method,
   (...)
   1015     response.headers,
   1016 )
   1017 log.debug("request_id: %s", response.headers.get("x-request-id"))

APIConnectionError: Connection error.
Instructor | Posted on: 03 Sept 2024
Hey Rahul,
It seems you are getting a connection error. Do you have the text file with your API key in the same directory as the notebook?
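For reference, here is a minimal sketch of how the client is typically set up with a key read from a text file; the file name "api_key.txt" is only an example and may differ in your notebook:

from openai import OpenAI

# Read the API key from a text file sitting next to the notebook
# (the file name "api_key.txt" is only an example).
with open("api_key.txt", "r") as f:
    api_key = f.read().strip()

client = OpenAI(api_key=api_key)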
Best,
365 Hristina
Posted on: 03 Sept 2024
Yes, I do.

Instructor | Posted on: 03 Sept 2024
Hey,
Please take a look at OpenAI's recommended solutions to this error, more specifically the APIConnectionError.
Judging by the error message, the problem is likely connected to your SSL certificates.
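A first thing to try is updating the certificate bundle Python uses (pip install --upgrade certifi inside the langchain_env environment) and restarting the kernel. If your network uses a proxy that re-signs HTTPS traffic, another option is to point the OpenAI client at a CA bundle you trust. The snippet below is only a sketch under those assumptions, not a course-provided fix; the bundle path "corp_ca_bundle.pem" is hypothetical:

import httpx
import certifi
from openai import OpenAI

# Option 1: explicitly use certifi's CA bundle, in case an outdated or
# misconfigured bundle is being picked up by default.
client = OpenAI(
    api_key=api_key,  # assumes api_key was loaded from your text file
    http_client=httpx.Client(verify=certifi.where()),
)

# Option 2 (corporate proxy / VPN): use a bundle that also contains your
# organisation's root certificate ("corp_ca_bundle.pem" is hypothetical).
# client = OpenAI(
#     api_key=api_key,
#     http_client=httpx.Client(verify="corp_ca_bundle.pem"),
# )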
Kind regards,
365 Hristina
