You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 32, in error_handler
yield
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 40, in readme_agent
asyncio.run(readme_generator(config, output_file))
File "/usr/local/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/asyncio/base_events.py", line 653, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 56, in readme_generator
responses = await llm.batch_request()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 102, in batch_request
summaries_responses = await self._batch_prompts(summaries_prompts)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 122, in _batch_prompts
batch_responses = await asyncio.gather(
^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 142, in _process_batch
return await self._make_request_code_summary(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 177, in _make_request_code_summary
_, summary_or_error = await self._make_request(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 189, in async_wrapped
return await copy(fn, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 111, in __call__
do = await self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 153, in iter
result = await action(retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/_utils.py", line 99, in inner
return call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/__init__.py", line 419, in exc_check
raise retry_exc from fut.exception()
tenacity.RetryError: RetryError[<Future at 0x7f866f8217d0 state=finished raised ClientResponseError>]
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/bin/readmeai", line 8, in <module>
sys.exit(main())
^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 1157, in __call__
return self.main(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/cli/main.py", line 90, in main
readme_agent(config=config, output_file=output)
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 39, in readme_agent
with error_handler():
File "/usr/local/lib/python3.11/contextlib.py", line 155, in __exit__
self.gen.throw(typ, value, traceback)
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 34, in error_handler
raise ReadmeGeneratorError(e) from e
readmeai.errors.ReadmeGeneratorError: Error generating README: RetryError[<Future at 0x7f866f8217d0 state=finished raised ClientResponseError>]
The text was updated successfully, but these errors were encountered:
-OpenAI
-Docker Desktop
-Windows 10
Hi, I followed the configuration steps using Docker, but I couldn't get readme-ai to work; it just gives me the error below.
I would appreciate help solving this, because I really want to use this tool for my projects.
{
"event": "Configuration file loaded: /usr/local/lib/python3.11/site-packages/readmeai/config/settings/ignore_list.toml",
"level": "info",
"logger": "readmeai.config.settings",
"timestamp": "2024-11-25T23:36:44.633229Z",
"filename": "settings.py",
"func_name": "_load_settings",
"lineno": 276
}
{
"event": "Configuration file loaded: /usr/local/lib/python3.11/site-packages/readmeai/config/settings/languages.toml",
"level": "info",
"logger": "readmeai.config.settings",
"timestamp": "2024-11-25T23:36:44.634357Z",
"filename": "settings.py",
"func_name": "_load_settings",
"lineno": 276
}
{
"event": "Configuration file loaded: /usr/local/lib/python3.11/site-packages/readmeai/config/settings/parsers.toml",
"level": "info",
"logger": "readmeai.config.settings",
"timestamp": "2024-11-25T23:36:44.635736Z",
"filename": "settings.py",
"func_name": "_load_settings",
"lineno": 276
}
{
"event": "Configuration file loaded: /usr/local/lib/python3.11/site-packages/readmeai/config/settings/prompts.toml",
"level": "info",
"logger": "readmeai.config.settings",
"timestamp": "2024-11-25T23:36:44.637616Z",
"filename": "settings.py",
"func_name": "_load_settings",
"lineno": 276
}
{
"event": "Configuration file loaded: /usr/local/lib/python3.11/site-packages/readmeai/config/settings/tool_config.toml",
"level": "info",
"logger": "readmeai.config.settings",
"timestamp": "2024-11-25T23:36:44.647523Z",
"filename": "settings.py",
"func_name": "_load_settings",
"lineno": 276
}
{
"event": "Configuration file loaded: /usr/local/lib/python3.11/site-packages/readmeai/config/settings/tooling.toml",
"level": "info",
"logger": "readmeai.config.settings",
"timestamp": "2024-11-25T23:36:44.650413Z",
"filename": "settings.py",
"func_name": "_load_settings",
"lineno": 276
}
{
"event": "Pydantic settings: dict_keys(['config', 'ignore_list', 'languages', 'parsers', 'prompts', 'tool_config', 'tooling'])",
"level": "info",
"logger": "readmeai.cli.main",
"timestamp": "2024-11-25T23:36:44.667618Z",
"filename": "main.py",
"func_name": "main",
"lineno": 86
}
{
"event": "Repository settings: repository='https://github.com/LucasRyuMuraoka/VinylDays' full_name='LucasRyuMuraoka/VinylDays' host_domain='github.com' host='github' name='VinylDays'",
"level": "info",
"logger": "readmeai.cli.main",
"timestamp": "2024-11-25T23:36:44.668179Z",
"filename": "main.py",
"func_name": "main",
"lineno": 87
}
{
"event": "LLM API settings: api='OPENAI' base_url='https://api.openai.com/v1/chat/completions' context_window=3900 encoder='cl100k_base' host_name=AnyHttpUrl('https://api.openai.com/') localhost=AnyHttpUrl('http://localhost:11434/') model='gpt-3.5-turbo' path='v1/chat/completions' temperature=0.1 tokens=699 top_p=0.9",
"level": "info",
"logger": "readmeai.cli.main",
"timestamp": "2024-11-25T23:36:44.668529Z",
"filename": "main.py",
"func_name": "main",
"lineno": 88
}
{
"event": "Total files analyzed: 109",
"level": "info",
"logger": "readmeai.main",
"timestamp": "2024-11-25T23:36:46.110358Z",
"filename": "main.py",
"func_name": "log_repository_context",
"lineno": 102
}
{
"event": "Metadata extracted: {'cicd': {}, 'containers': {'docker': 'Back-End/.env'}, 'documentation': {}, 'package_managers': {'npm': 'Front-End/package.json, Back-End/package.json'}}",
"level": "info",
"logger": "readmeai.main",
"timestamp": "2024-11-25T23:36:46.110582Z",
"filename": "main.py",
"func_name": "log_repository_context",
"lineno": 103
}
{
"event": "Dependencies: ['docker', 'npm', 'style.module.css.map', 'global.module.css.map', '.env', 'html', '.env.example', 'sassy css', 'sql', 'package.json', 'style.css.map', 'css', 'javascript']",
"level": "info",
"logger": "readmeai.main",
"timestamp": "2024-11-25T23:36:46.110812Z",
"filename": "main.py",
"func_name": "log_repository_context",
"lineno": 104
}
{
"event": "Languages: {'map': 3, 'scss': 3, 'css': 4, 'html': 2, 'json': 2, 'js': 90, 'example': 1, 'sql': 3}",
"level": "info",
"logger": "readmeai.main",
"timestamp": "2024-11-25T23:36:46.111055Z",
"filename": "main.py",
"func_name": "log_repository_context",
"lineno": 105
}
{
"event": "Error processing request for 'HTML/pages/public/global/global.module.css.map': ClientResponseError(RequestInfo(url=URL('https://api.openai.com/v1/chat/completions'), method='POST', headers=<CIMultiDictProxy('Host': 'api.openai.com', 'Authorization': 'Bearer sk-proj-[REDACTED — live API key removed; rotate this key immediately]', 'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate', 'User-Agent': 'Python/3.11 aiohttp/3.11.7', 'Content-Length': '1798', 'Content-Type': 'application/json')>, real_url=URL('https://api.openai.com/v1/chat/completions')), (), status=429, message='Too Many Requests', headers=<CIMultiDictProxy('Date': 'Mon, 25 Nov 2024 23:36:50 GMT', 'Content-Type': 'application/json; charset=utf-8', 'Content-Length': '337', 'Connection': 'keep-alive', 'Vary': 'Origin', 'x-request-id': 'req_3298025bb51baa47860ed1cb0851aa39', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'Set-Cookie': '__cf_bm=iMN6YwXVZ6aIn6SKMASfxMeIi6SsX2K23M5TS59Tzu4-1732577810-1.0.1.1-Refoug0EMjA_pI.K9GJyYlyWBWQ4mnFbPyszluVe6EWoRSge08X8x6XzXhS93faByKS5mSid9nO4pTs.r5D2NQ; path=/; expires=Tue, 26-Nov-24 00:06:50 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None', 'X-Content-Type-Options': 'nosniff', 'Set-Cookie': '_cfuvid=2j2ZfnEapKXbSjymovhIL0A3W4yWa9zztzXssEIAWLg-1732577810618-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None', 'Server': 'cloudflare', 'CF-RAY': '8e85768bda704ee9-GRU', 'alt-svc': 'h3=":443"; ma=86400')>)",
"token_count": 505,
"max_count": 3900,
"index": "HTML/pages/public/global/global.module.css.map",
"level": "error",
"logger": "readmeai.models.base",
"timestamp": "2024-11-25T23:36:50.071973Z",
"filename": "openai.py",
"func_name": "_make_request",
"lineno": 113
}
{
"event": "Error processing request for 'HTML/pages/public/global/global.module.css.map': ClientResponseError(RequestInfo(url=URL('https://api.openai.com/v1/chat/completions'), method='POST', headers=<CIMultiDictProxy('Host': 'api.openai.com', 'Authorization': 'Bearer sk-proj-[REDACTED — live API key removed; rotate this key immediately]', 'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate', 'User-Agent': 'Python/3.11 aiohttp/3.11.7', 'Cookie': '__cf_bm=iMN6YwXVZ6aIn6SKMASfxMeIi6SsX2K23M5TS59Tzu4-1732577810-1.0.1.1-Refoug0EMjA_pI.K9GJyYlyWBWQ4mnFbPyszluVe6EWoRSge08X8x6XzXhS93faByKS5mSid9nO4pTs.r5D2NQ; _cfuvid=2j2ZfnEapKXbSjymovhIL0A3W4yWa9zztzXssEIAWLg-1732577810618-0.0.1.1-604800000', 'Content-Length': '1798', 'Content-Type': 'application/json')>, real_url=URL('https://api.openai.com/v1/chat/completions')), (), status=429, message='Too Many Requests', headers=<CIMultiDictProxy('Date': 'Mon, 25 Nov 2024 23:36:54 GMT', 'Content-Type': 'application/json; charset=utf-8', 'Content-Length': '337', 'Connection': 'keep-alive', 'Vary': 'Origin', 'x-request-id': 'req_9b57be27e222f01a1213a82b40faf2d6', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8e8576ad88d74ee9-GRU', 'alt-svc': 'h3=":443"; ma=86400')>)",
"token_count": 505,
"max_count": 3900,
"index": "HTML/pages/public/global/global.module.css.map",
"level": "error",
"logger": "readmeai.models.base",
"timestamp": "2024-11-25T23:36:54.310279Z",
"filename": "openai.py",
"func_name": "_make_request",
"lineno": 113
}
{
"event": "Error processing request for 'HTML/pages/public/global/global.module.css.map': ClientResponseError(RequestInfo(url=URL('https://api.openai.com/v1/chat/completions'), method='POST', headers=<CIMultiDictProxy('Host': 'api.openai.com', 'Authorization': 'Bearer sk-proj-[REDACTED — live API key removed; rotate this key immediately]', 'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate', 'User-Agent': 'Python/3.11 aiohttp/3.11.7', 'Cookie': '__cf_bm=iMN6YwXVZ6aIn6SKMASfxMeIi6SsX2K23M5TS59Tzu4-1732577810-1.0.1.1-Refoug0EMjA_pI.K9GJyYlyWBWQ4mnFbPyszluVe6EWoRSge08X8x6XzXhS93faByKS5mSid9nO4pTs.r5D2NQ; _cfuvid=2j2ZfnEapKXbSjymovhIL0A3W4yWa9zztzXssEIAWLg-1732577810618-0.0.1.1-604800000', 'Content-Length': '1798', 'Content-Type': 'application/json')>, real_url=URL('https://api.openai.com/v1/chat/completions')), (), status=429, message='Too Many Requests', headers=<CIMultiDictProxy('Date': 'Mon, 25 Nov 2024 23:36:59 GMT', 'Content-Type': 'application/json; charset=utf-8', 'Content-Length': '337', 'Connection': 'keep-alive', 'Vary': 'Origin', 'x-request-id': 'req_84dad125c3febf7a4a466ffd65a30603', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8e8576c7fb134ee9-GRU', 'alt-svc': 'h3=":443"; ma=86400')>)",
"token_count": 505,
"max_count": 3900,
"index": "HTML/pages/public/global/global.module.css.map",
"level": "error",
"logger": "readmeai.models.base",
"timestamp": "2024-11-25T23:36:58.533948Z",
"filename": "openai.py",
"func_name": "_make_request",
"lineno": 113
}
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 114, in __call__
result = await fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/openai.py", line 99, in _make_request
response.raise_for_status()
File "/usr/local/lib/python3.11/site-packages/aiohttp/client_reqrep.py", line 1135, in raise_for_status
raise ClientResponseError(
aiohttp.client_exceptions.ClientResponseError: 429, message='Too Many Requests', url='https://api.openai.com/v1/chat/completions'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 32, in error_handler
yield
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 40, in readme_agent
asyncio.run(readme_generator(config, output_file))
File "/usr/local/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/asyncio/base_events.py", line 653, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 56, in readme_generator
responses = await llm.batch_request()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 102, in batch_request
summaries_responses = await self._batch_prompts(summaries_prompts)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 122, in _batch_prompts
batch_responses = await asyncio.gather(
^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 142, in _process_batch
return await self._make_request_code_summary(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/models/base.py", line 177, in _make_request_code_summary
_, summary_or_error = await self._make_request(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 189, in async_wrapped
return await copy(fn, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 111, in __call__
do = await self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/asyncio/__init__.py", line 153, in iter
result = await action(retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/_utils.py", line 99, in inner
return call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/tenacity/__init__.py", line 419, in exc_check
raise retry_exc from fut.exception()
tenacity.RetryError: RetryError[<Future at 0x7f866f8217d0 state=finished raised ClientResponseError>]
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/bin/readmeai", line 8, in <module>
sys.exit(main())
^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 1157, in __call__
return self.main(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/readmeai/cli/main.py", line 90, in main
readme_agent(config=config, output_file=output)
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 39, in readme_agent
with error_handler():
File "/usr/local/lib/python3.11/contextlib.py", line 155, in __exit__
self.gen.throw(typ, value, traceback)
File "/usr/local/lib/python3.11/site-packages/readmeai/main.py", line 34, in error_handler
raise ReadmeGeneratorError(e) from e
readmeai.errors.ReadmeGeneratorError: Error generating README: RetryError[<Future at 0x7f866f8217d0 state=finished raised ClientResponseError>]
The text was updated successfully, but these errors were encountered: