This is an automated email from the ASF dual-hosted git repository.
freeoneplus pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris-mcp-server.git
The following commit(s) were added to refs/heads/master by this push:
new 282a1c0 [BUG] Further fix the at_eof problem caused by aiomysql (#9)
282a1c0 is described below
commit 282a1c0bd9a86aad75a7f48478fecaa2069efb3e
Author: Yijia Su <[email protected]>
AuthorDate: Wed Jul 2 19:29:37 2025 +0800
[BUG] Further fix the at_eof problem caused by aiomysql (#9)
* fix at_eof bug
* update uv.lock
---
doris_mcp_server/utils/db.py | 305 +++++++++++++++++++++++++++++-----------
uv.lock | 328 +++++++++++++++++++++----------------------
2 files changed, 390 insertions(+), 243 deletions(-)
diff --git a/doris_mcp_server/utils/db.py b/doris_mcp_server/utils/db.py
index 8c129ff..f6cc9f5 100644
--- a/doris_mcp_server/utils/db.py
+++ b/doris_mcp_server/utils/db.py
@@ -210,7 +210,8 @@ class DorisConnectionManager:
if not self.config.database.password:
self.logger.warning("Database password is empty, this may
cause connection issues")
- # Create connection pool with additional parameters for stability
+ # Create connection pool with improved stability parameters
+ # Key change: Set minsize=0 to avoid pre-creation issues that
cause at_eof errors
self.pool = await aiomysql.create_pool(
host=self.config.database.host,
port=self.config.database.port,
@@ -218,22 +219,22 @@ class DorisConnectionManager:
password=self.config.database.password,
db=self.config.database.database,
charset="utf8",
- minsize=self.config.database.min_connections or 5,
+ minsize=0, # Avoid pre-creation issues - create connections
on demand
maxsize=self.config.database.max_connections or 20,
autocommit=True,
connect_timeout=self.connection_timeout,
- # Additional parameters for stability
- pool_recycle=3600, # Recycle connections every hour
+ # Enhanced stability parameters
+ pool_recycle=7200, # Recycle connections every 2 hours
echo=False, # Don't echo SQL statements
)
- # Test the connection pool
- if not await self.test_connection():
- raise RuntimeError("Connection pool test failed")
+ # Test the connection pool with a more robust test
+ if not await self._robust_connection_test():
+ raise RuntimeError("Connection pool robust test failed")
self.logger.info(
- f"Connection pool initialized successfully, min connections:
{self.config.database.min_connections}, "
- f"max connections: {self.config.database.max_connections}"
+ f"Connection pool initialized successfully with on-demand
connection creation, "
+ f"max connections: {self.config.database.max_connections or
20}"
)
# Start background monitoring tasks
@@ -252,63 +253,178 @@ class DorisConnectionManager:
self.pool = None
raise
+ async def _robust_connection_test(self) -> bool:
+ """Perform a robust connection test that validates full connection
health"""
+ max_retries = 3
+ for attempt in range(max_retries):
+ try:
+ self.logger.debug(f"Testing connection pool (attempt {attempt
+ 1}/{max_retries})")
+
+ # Test connection creation and validation
+ test_conn = await self._create_raw_connection_with_validation()
+ if test_conn:
+ # Test basic query execution
+ async with test_conn.cursor() as cursor:
+ await cursor.execute("SELECT 1")
+ result = await cursor.fetchone()
+ if result and result[0] == 1:
+ self.logger.debug("Connection pool test
successful")
+ # Return connection to pool
+ if self.pool:
+ self.pool.release(test_conn)
+ return True
+ else:
+ self.logger.warning("Connection test query
returned unexpected result")
+
+ # Close test connection if we get here
+ await test_conn.ensure_closed()
+
+ except Exception as e:
+ self.logger.warning(f"Connection test attempt {attempt + 1}
failed: {e}")
+ if attempt == max_retries - 1:
+ self.logger.error("All connection test attempts failed")
+ return False
+ else:
+ # Wait before retry
+ await asyncio.sleep(1.0 * (attempt + 1))
+
+ return False
+
+ async def _create_raw_connection_with_validation(self, max_retries: int =
3):
+ """Create a raw connection with comprehensive validation"""
+ for attempt in range(max_retries):
+ try:
+ if not self.pool:
+ raise RuntimeError("Connection pool not initialized")
+
+ # Acquire connection from pool
+ raw_connection = await self.pool.acquire()
+
+ # Basic connection validation
+ if not raw_connection:
+ self.logger.warning(f"Pool returned None connection
(attempt {attempt + 1})")
+ continue
+
+ if raw_connection.closed:
+ self.logger.warning(f"Pool returned closed connection
(attempt {attempt + 1})")
+ continue
+
+ # Perform a simple ping test instead of checking internal state
+ # Internal state (_reader, _transport) might not be fully
initialized yet
+ try:
+ # Test basic connectivity with a simple query
+ async with raw_connection.cursor() as cursor:
+ await cursor.execute("SELECT 1")
+ result = await cursor.fetchone()
+ if result and result[0] == 1:
+ self.logger.debug(f"Successfully created and
validated raw connection (attempt {attempt + 1})")
+ return raw_connection
+ else:
+ self.logger.warning(f"Connection test query failed
(attempt {attempt + 1})")
+ await raw_connection.ensure_closed()
+ continue
+
+ except Exception as e:
+ # Check if this is an at_eof error specifically
+ error_str = str(e).lower()
+ if 'at_eof' in error_str or 'nonetype' in error_str:
+ self.logger.warning(f"Connection has at_eof issue
(attempt {attempt + 1}): {e}")
+ else:
+ self.logger.warning(f"Connection test failed (attempt
{attempt + 1}): {e}")
+
+ try:
+ await raw_connection.ensure_closed()
+ except Exception:
+ pass
+ continue
+
+ except Exception as e:
+ self.logger.warning(f"Raw connection creation attempt {attempt
+ 1} failed: {e}")
+ if attempt == max_retries - 1:
+ raise RuntimeError(f"Failed to create valid connection
after {max_retries} attempts: {e}")
+ else:
+ # Exponential backoff
+ await asyncio.sleep(0.5 * (2 ** attempt))
+
+ raise RuntimeError("Failed to create valid connection")
+
async def get_connection(self, session_id: str) -> DorisConnection:
- """Get database connection
+ """Get database connection with enhanced reliability
Supports session-level connection reuse to improve performance and
consistency
"""
# Check if there's an existing session connection
if session_id in self.session_connections:
conn = self.session_connections[session_id]
- # Check connection health
- if await conn.ping():
+ # Enhanced connection health check
+ if await self._comprehensive_connection_health_check(conn):
return conn
else:
# Connection is unhealthy, clean up and create new one
+ self.logger.debug(f"Existing connection unhealthy for session
{session_id}, creating new one")
await self._cleanup_session_connection(session_id)
- # Create new connection
- return await self._create_new_connection(session_id)
+ # Create new connection with retry logic
+ return await self._create_new_connection_with_retry(session_id)
- async def _create_new_connection(self, session_id: str) -> DorisConnection:
- """Create new database connection"""
+ async def _comprehensive_connection_health_check(self, conn:
DorisConnection) -> bool:
+ """Perform comprehensive connection health check"""
try:
- if not self.pool:
- raise RuntimeError("Connection pool not initialized")
-
- # Get connection from pool
- raw_connection = await self.pool.acquire()
-
- # Validate the raw connection
- if not raw_connection:
- raise RuntimeError(f"Failed to acquire connection from pool
for session {session_id}")
-
- # Verify the connection is not closed
- if raw_connection.closed:
- raise RuntimeError(f"Acquired connection is already closed for
session {session_id}")
-
- # Create wrapped connection
- doris_conn = DorisConnection(raw_connection, session_id,
self.security_manager)
-
- # Test the connection before storing it
- if not await doris_conn.ping():
- # If ping fails, release the connection and raise error
- if self.pool and raw_connection and not raw_connection.closed:
- self.pool.release(raw_connection)
- raise RuntimeError(f"New connection failed ping test for
session {session_id}")
+ # Check basic connection state
+ if not conn.connection or conn.connection.closed:
+ return False
- # Store in session connections
- self.session_connections[session_id] = doris_conn
+ # Instead of checking internal state, perform a simple ping test
+ # This is more reliable and less dependent on aiomysql internals
+ if not await conn.ping():
+ return False
- self.metrics.total_connections += 1
- self.logger.debug(f"Created new connection for session:
{session_id}")
+ return True
- return doris_conn
-
except Exception as e:
- self.metrics.connection_errors += 1
- self.logger.error(f"Failed to create connection for session
{session_id}: {e}")
- raise
+ # Check for at_eof errors specifically
+ error_str = str(e).lower()
+ if 'at_eof' in error_str:
+ self.logger.debug(f"Connection health check failed with at_eof
error: {e}")
+ else:
+ self.logger.debug(f"Connection health check failed: {e}")
+ return False
+
+ async def _create_new_connection_with_retry(self, session_id: str,
max_retries: int = 3) -> DorisConnection:
+ """Create new database connection with retry logic"""
+ for attempt in range(max_retries):
+ try:
+ # Get validated raw connection
+ raw_connection = await
self._create_raw_connection_with_validation()
+
+ # Create wrapped connection
+ doris_conn = DorisConnection(raw_connection, session_id,
self.security_manager)
+
+ # Comprehensive connection test
+ if await
self._comprehensive_connection_health_check(doris_conn):
+ # Store in session connections
+ self.session_connections[session_id] = doris_conn
+ self.metrics.total_connections += 1
+ self.logger.debug(f"Successfully created new connection
for session: {session_id}")
+ return doris_conn
+ else:
+ # Connection failed health check, clean up and retry
+ self.logger.warning(f"New connection failed health check
for session {session_id} (attempt {attempt + 1})")
+ try:
+ await doris_conn.close()
+ except Exception:
+ pass
+
+ except Exception as e:
+ self.logger.warning(f"Connection creation attempt {attempt +
1} failed for session {session_id}: {e}")
+ if attempt == max_retries - 1:
+ self.metrics.connection_errors += 1
+ raise RuntimeError(f"Failed to create connection for
session {session_id} after {max_retries} attempts: {e}")
+ else:
+ # Exponential backoff
+ await asyncio.sleep(0.5 * (2 ** attempt))
+
+ raise RuntimeError(f"Unexpected failure in connection creation for
session {session_id}")
async def release_connection(self, session_id: str):
"""Release session connection"""
@@ -316,26 +432,47 @@ class DorisConnectionManager:
await self._cleanup_session_connection(session_id)
async def _cleanup_session_connection(self, session_id: str):
- """Clean up session connection"""
+ """Clean up session connection with enhanced safety"""
if session_id in self.session_connections:
conn = self.session_connections[session_id]
try:
- # Return connection to pool only if it's valid and not closed
+ # Simplified connection validation before returning to pool
+ connection_healthy = False
+
if (self.pool and
conn.connection and
- not conn.connection.closed and
- hasattr(conn.connection, '_reader') and
- conn.connection._reader is not None):
+ not conn.connection.closed):
+
+ # Test if connection is still healthy with a simple check
+ try:
+ # Quick ping test to see if connection is usable
+ async with conn.connection.cursor() as cursor:
+ await cursor.execute("SELECT 1")
+ await cursor.fetchone()
+ connection_healthy = True
+ except Exception as test_error:
+ self.logger.debug(f"Connection health test failed for
session {session_id}: {test_error}")
+ connection_healthy = False
+
+ if connection_healthy:
+ # Connection appears healthy, return to pool
try:
- # Try to gracefully return to pool
self.pool.release(conn.connection)
+ self.logger.debug(f"Successfully returned connection
to pool for session {session_id}")
except Exception as pool_error:
self.logger.debug(f"Failed to return connection to
pool for session {session_id}: {pool_error}")
- # If pool release fails, try to close the connection
directly
try:
await conn.connection.ensure_closed()
except Exception:
- pass # Ignore errors during forced close
+ pass
+ else:
+ # Connection is unhealthy, force close
+ self.logger.debug(f"Connection unhealthy for session
{session_id}, force closing")
+ try:
+ if conn.connection and not conn.connection.closed:
+ await conn.connection.ensure_closed()
+ except Exception:
+ pass
# Close connection wrapper
await conn.close()
@@ -365,24 +502,24 @@ class DorisConnectionManager:
self.logger.error(f"Health check error: {e}")
async def _perform_health_check(self):
- """Perform health check"""
+ """Perform enhanced health check"""
try:
unhealthy_sessions = []
- # First pass: check basic connectivity
+ # Enhanced health check with comprehensive validation
for session_id, conn in self.session_connections.items():
- if not await conn.ping():
+ if not await self._comprehensive_connection_health_check(conn):
unhealthy_sessions.append(session_id)
- # Second pass: check for stale connections (over 30 minutes old)
+ # Check for stale connections (over 30 minutes old)
current_time = datetime.utcnow()
stale_sessions = []
for session_id, conn in self.session_connections.items():
if session_id not in unhealthy_sessions: # Don't double-check
last_used_delta = (current_time -
conn.last_used).total_seconds()
if last_used_delta > 1800: # 30 minutes
- # Force a ping check for stale connections
- if not await conn.ping():
+ # Force a comprehensive health check for stale
connections
+ if not await
self._comprehensive_connection_health_check(conn):
stale_sessions.append(session_id)
all_problematic_sessions = list(set(unhealthy_sessions +
stale_sessions))
@@ -453,9 +590,29 @@ class DorisConnectionManager:
async def execute_query(
self, session_id: str, sql: str, params: tuple | None = None,
auth_context=None
) -> QueryResult:
- """Execute query"""
- conn = await self.get_connection(session_id)
- return await conn.execute(sql, params, auth_context)
+ """Execute query with enhanced error handling and retry logic"""
+ max_retries = 2
+ for attempt in range(max_retries):
+ try:
+ conn = await self.get_connection(session_id)
+ return await conn.execute(sql, params, auth_context)
+ except Exception as e:
+ error_msg = str(e).lower()
+ # Check for connection-related errors that warrant retry
+ is_connection_error = any(keyword in error_msg for keyword in [
+ 'at_eof', 'connection', 'closed', 'nonetype', 'reader',
'transport'
+ ])
+
+ if is_connection_error and attempt < max_retries - 1:
+ self.logger.warning(f"Connection error during query
execution (attempt {attempt + 1}): {e}")
+ # Clean up the problematic connection
+ await self.release_connection(session_id)
+ # Wait before retry
+ await asyncio.sleep(0.5 * (attempt + 1))
+ continue
+ else:
+ # Not a connection error or final retry - re-raise
+ raise
@asynccontextmanager
async def get_connection_context(self, session_id: str):
@@ -500,20 +657,8 @@ class DorisConnectionManager:
self.logger.error(f"Error closing connection manager: {e}")
async def test_connection(self) -> bool:
- """Test database connection"""
- try:
- if not self.pool:
- return False
-
- async with self.pool.acquire() as conn:
- async with conn.cursor() as cursor:
- await cursor.execute("SELECT 1")
- result = await cursor.fetchone()
- return result is not None
-
- except Exception as e:
- self.logger.error(f"Connection test failed: {e}")
- return False
+ """Test database connection using robust connection test"""
+ return await self._robust_connection_test()
async def diagnose_connection_health(self) -> Dict[str, Any]:
"""Diagnose connection pool and session health"""
@@ -680,3 +825,5 @@ class ConnectionPoolMonitor:
report["recommendations"].append("Connection pool utilization is
high, consider increasing pool size")
return report
+
+
diff --git a/uv.lock b/uv.lock
index db4e9ea..518966a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -518,6 +518,170 @@ wheels = [
{ url =
"https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl",
hash =
"sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size
= 587408 },
]
+[[package]]
+name = "doris-mcp-server"
+version = "0.4.2"
+source = { editable = "." }
+dependencies = [
+ { name = "aiofiles" },
+ { name = "aiohttp" },
+ { name = "aiomysql" },
+ { name = "aioredis" },
+ { name = "asyncio-mqtt" },
+ { name = "bcrypt" },
+ { name = "click" },
+ { name = "cryptography" },
+ { name = "fastapi" },
+ { name = "httpx" },
+ { name = "mcp" },
+ { name = "numpy" },
+ { name = "orjson" },
+ { name = "pandas" },
+ { name = "passlib", extra = ["bcrypt"] },
+ { name = "prometheus-client" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
+ { name = "pyjwt" },
+ { name = "pymysql" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "pytest-cov" },
+ { name = "python-dateutil" },
+ { name = "python-dotenv" },
+ { name = "python-jose", extra = ["cryptography"] },
+ { name = "python-multipart" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "rich" },
+ { name = "sqlparse" },
+ { name = "starlette" },
+ { name = "structlog" },
+ { name = "toml" },
+ { name = "tqdm" },
+ { name = "typer" },
+ { name = "uvicorn", extra = ["standard"] },
+ { name = "websockets" },
+]
+
+[package.optional-dependencies]
+dev = [
+ { name = "bandit" },
+ { name = "black" },
+ { name = "flake8" },
+ { name = "isort" },
+ { name = "mypy" },
+ { name = "myst-parser" },
+ { name = "pre-commit" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "pytest-cov" },
+ { name = "pytest-mock" },
+ { name = "pytest-xdist" },
+ { name = "ruff" },
+ { name = "safety" },
+ { name = "sphinx" },
+ { name = "sphinx-rtd-theme" },
+ { name = "tox" },
+]
+docs = [
+ { name = "myst-parser" },
+ { name = "sphinx" },
+ { name = "sphinx-autoapi" },
+ { name = "sphinx-rtd-theme" },
+]
+monitoring = [
+ { name = "grafana-client" },
+ { name = "jaeger-client" },
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-sdk" },
+ { name = "prometheus-client" },
+]
+performance = [
+ { name = "cchardet" },
+ { name = "orjson" },
+ { name = "uvloop" },
+]
+
+[package.dev-dependencies]
+dev = [
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "aiofiles", specifier = ">=23.0.0" },
+ { name = "aiohttp", specifier = ">=3.9.0" },
+ { name = "aiomysql", specifier = ">=0.2.0" },
+ { name = "aioredis", specifier = ">=2.0.0" },
+ { name = "asyncio-mqtt", specifier = ">=0.16.0" },
+ { name = "bandit", marker = "extra == 'dev'", specifier = ">=1.7.0" },
+ { name = "bcrypt", specifier = ">=4.1.0" },
+ { name = "black", marker = "extra == 'dev'", specifier = ">=23.12.0" },
+ { name = "cchardet", marker = "extra == 'performance'", specifier =
">=2.1.0" },
+ { name = "click", specifier = ">=8.1.0" },
+ { name = "cryptography", specifier = ">=41.0.0" },
+ { name = "fastapi", specifier = ">=0.108.0" },
+ { name = "flake8", marker = "extra == 'dev'", specifier = ">=7.0.0" },
+ { name = "grafana-client", marker = "extra == 'monitoring'", specifier =
">=3.5.0" },
+ { name = "httpx", specifier = ">=0.26.0" },
+ { name = "isort", marker = "extra == 'dev'", specifier = ">=5.13.0" },
+ { name = "jaeger-client", marker = "extra == 'monitoring'", specifier =
">=4.8.0" },
+ { name = "mcp", specifier = ">=1.8.0,<2.0.0" },
+ { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.8.0" },
+ { name = "myst-parser", marker = "extra == 'dev'", specifier = ">=2.0.0" },
+ { name = "myst-parser", marker = "extra == 'docs'", specifier = ">=2.0.0"
},
+ { name = "numpy", specifier = ">=1.24.0" },
+ { name = "opentelemetry-api", marker = "extra == 'monitoring'", specifier
= ">=1.21.0" },
+ { name = "opentelemetry-sdk", marker = "extra == 'monitoring'", specifier
= ">=1.21.0" },
+ { name = "orjson", specifier = ">=3.9.0" },
+ { name = "orjson", marker = "extra == 'performance'", specifier =
">=3.9.0" },
+ { name = "pandas", specifier = ">=2.0.0" },
+ { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.0" },
+ { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.6.0" },
+ { name = "prometheus-client", specifier = ">=0.19.0" },
+ { name = "prometheus-client", marker = "extra == 'monitoring'", specifier
= ">=0.19.0" },
+ { name = "pydantic", specifier = ">=2.5.0" },
+ { name = "pydantic-settings", specifier = ">=2.1.0" },
+ { name = "pyjwt", specifier = ">=2.8.0" },
+ { name = "pymysql", specifier = ">=1.1.0" },
+ { name = "pytest", specifier = ">=8.4.0" },
+ { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.4.0" },
+ { name = "pytest-asyncio", specifier = ">=1.0.0" },
+ { name = "pytest-asyncio", marker = "extra == 'dev'", specifier =
">=0.23.0" },
+ { name = "pytest-cov", specifier = ">=6.1.1" },
+ { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.1.0" },
+ { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.12.0"
},
+ { name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.5.0"
},
+ { name = "python-dateutil", specifier = ">=2.8.0" },
+ { name = "python-dotenv", specifier = ">=1.0.0" },
+ { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" },
+ { name = "python-multipart", specifier = ">=0.0.6" },
+ { name = "pyyaml", specifier = ">=6.0.0" },
+ { name = "requests", specifier = ">=2.31.0" },
+ { name = "rich", specifier = ">=13.7.0" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" },
+ { name = "safety", marker = "extra == 'dev'", specifier = ">=2.3.0" },
+ { name = "sphinx", marker = "extra == 'dev'", specifier = ">=7.2.0" },
+ { name = "sphinx", marker = "extra == 'docs'", specifier = ">=7.2.0" },
+ { name = "sphinx-autoapi", marker = "extra == 'docs'", specifier =
">=3.0.0" },
+ { name = "sphinx-rtd-theme", marker = "extra == 'dev'", specifier =
">=2.0.0" },
+ { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier =
">=2.0.0" },
+ { name = "sqlparse", specifier = ">=0.4.4" },
+ { name = "starlette", specifier = ">=0.27.0" },
+ { name = "structlog", specifier = ">=23.2.0" },
+ { name = "toml", specifier = ">=0.10.0" },
+ { name = "tox", marker = "extra == 'dev'", specifier = ">=4.11.0" },
+ { name = "tqdm", specifier = ">=4.66.0" },
+ { name = "typer", specifier = ">=0.9.0" },
+ { name = "uvicorn", extras = ["standard"], specifier = ">=0.25.0" },
+ { name = "uvloop", marker = "extra == 'performance'", specifier =
">=0.19.0" },
+ { name = "websockets", specifier = ">=12.0" },
+]
+provides-extras = ["dev", "docs", "performance", "monitoring"]
+
+[package.metadata.requires-dev]
+dev = [{ name = "ruff", specifier = ">=0.11.13" }]
+
[[package]]
name = "dparse"
version = "0.6.4"
@@ -946,170 +1110,6 @@ wheels = [
{ url =
"https://files.pythonhosted.org/packages/79/45/823ad05504bea55cb0feb7470387f151252127ad5c72f8882e8fe6cf5c0e/mcp-1.9.3-py3-none-any.whl",
hash =
"sha256:69b0136d1ac9927402ed4cf221d4b8ff875e7132b0b06edd446448766f34f9b9", size
= 131063 },
]
-[[package]]
-name = "mcp-doris-server"
-version = "0.4.2"
-source = { editable = "." }
-dependencies = [
- { name = "aiofiles" },
- { name = "aiohttp" },
- { name = "aiomysql" },
- { name = "aioredis" },
- { name = "asyncio-mqtt" },
- { name = "bcrypt" },
- { name = "click" },
- { name = "cryptography" },
- { name = "fastapi" },
- { name = "httpx" },
- { name = "mcp" },
- { name = "numpy" },
- { name = "orjson" },
- { name = "pandas" },
- { name = "passlib", extra = ["bcrypt"] },
- { name = "prometheus-client" },
- { name = "pydantic" },
- { name = "pydantic-settings" },
- { name = "pyjwt" },
- { name = "pymysql" },
- { name = "pytest" },
- { name = "pytest-asyncio" },
- { name = "pytest-cov" },
- { name = "python-dateutil" },
- { name = "python-dotenv" },
- { name = "python-jose", extra = ["cryptography"] },
- { name = "python-multipart" },
- { name = "pyyaml" },
- { name = "requests" },
- { name = "rich" },
- { name = "sqlparse" },
- { name = "starlette" },
- { name = "structlog" },
- { name = "toml" },
- { name = "tqdm" },
- { name = "typer" },
- { name = "uvicorn", extra = ["standard"] },
- { name = "websockets" },
-]
-
-[package.optional-dependencies]
-dev = [
- { name = "bandit" },
- { name = "black" },
- { name = "flake8" },
- { name = "isort" },
- { name = "mypy" },
- { name = "myst-parser" },
- { name = "pre-commit" },
- { name = "pytest" },
- { name = "pytest-asyncio" },
- { name = "pytest-cov" },
- { name = "pytest-mock" },
- { name = "pytest-xdist" },
- { name = "ruff" },
- { name = "safety" },
- { name = "sphinx" },
- { name = "sphinx-rtd-theme" },
- { name = "tox" },
-]
-docs = [
- { name = "myst-parser" },
- { name = "sphinx" },
- { name = "sphinx-autoapi" },
- { name = "sphinx-rtd-theme" },
-]
-monitoring = [
- { name = "grafana-client" },
- { name = "jaeger-client" },
- { name = "opentelemetry-api" },
- { name = "opentelemetry-sdk" },
- { name = "prometheus-client" },
-]
-performance = [
- { name = "cchardet" },
- { name = "orjson" },
- { name = "uvloop" },
-]
-
-[package.dev-dependencies]
-dev = [
- { name = "ruff" },
-]
-
-[package.metadata]
-requires-dist = [
- { name = "aiofiles", specifier = ">=23.0.0" },
- { name = "aiohttp", specifier = ">=3.9.0" },
- { name = "aiomysql", specifier = ">=0.2.0" },
- { name = "aioredis", specifier = ">=2.0.0" },
- { name = "asyncio-mqtt", specifier = ">=0.16.0" },
- { name = "bandit", marker = "extra == 'dev'", specifier = ">=1.7.0" },
- { name = "bcrypt", specifier = ">=4.1.0" },
- { name = "black", marker = "extra == 'dev'", specifier = ">=23.12.0" },
- { name = "cchardet", marker = "extra == 'performance'", specifier =
">=2.1.0" },
- { name = "click", specifier = ">=8.1.0" },
- { name = "cryptography", specifier = ">=41.0.0" },
- { name = "fastapi", specifier = ">=0.108.0" },
- { name = "flake8", marker = "extra == 'dev'", specifier = ">=7.0.0" },
- { name = "grafana-client", marker = "extra == 'monitoring'", specifier =
">=3.5.0" },
- { name = "httpx", specifier = ">=0.26.0" },
- { name = "isort", marker = "extra == 'dev'", specifier = ">=5.13.0" },
- { name = "jaeger-client", marker = "extra == 'monitoring'", specifier =
">=4.8.0" },
- { name = "mcp", specifier = ">=1.8.0,<2.0.0" },
- { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.8.0" },
- { name = "myst-parser", marker = "extra == 'dev'", specifier = ">=2.0.0" },
- { name = "myst-parser", marker = "extra == 'docs'", specifier = ">=2.0.0"
},
- { name = "numpy", specifier = ">=1.24.0" },
- { name = "opentelemetry-api", marker = "extra == 'monitoring'", specifier
= ">=1.21.0" },
- { name = "opentelemetry-sdk", marker = "extra == 'monitoring'", specifier
= ">=1.21.0" },
- { name = "orjson", specifier = ">=3.9.0" },
- { name = "orjson", marker = "extra == 'performance'", specifier =
">=3.9.0" },
- { name = "pandas", specifier = ">=2.0.0" },
- { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.0" },
- { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.6.0" },
- { name = "prometheus-client", specifier = ">=0.19.0" },
- { name = "prometheus-client", marker = "extra == 'monitoring'", specifier
= ">=0.19.0" },
- { name = "pydantic", specifier = ">=2.5.0" },
- { name = "pydantic-settings", specifier = ">=2.1.0" },
- { name = "pyjwt", specifier = ">=2.8.0" },
- { name = "pymysql", specifier = ">=1.1.0" },
- { name = "pytest", specifier = ">=8.4.0" },
- { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.4.0" },
- { name = "pytest-asyncio", specifier = ">=1.0.0" },
- { name = "pytest-asyncio", marker = "extra == 'dev'", specifier =
">=0.23.0" },
- { name = "pytest-cov", specifier = ">=6.1.1" },
- { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.1.0" },
- { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.12.0"
},
- { name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.5.0"
},
- { name = "python-dateutil", specifier = ">=2.8.0" },
- { name = "python-dotenv", specifier = ">=1.0.0" },
- { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" },
- { name = "python-multipart", specifier = ">=0.0.6" },
- { name = "pyyaml", specifier = ">=6.0.0" },
- { name = "requests", specifier = ">=2.31.0" },
- { name = "rich", specifier = ">=13.7.0" },
- { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" },
- { name = "safety", marker = "extra == 'dev'", specifier = ">=2.3.0" },
- { name = "sphinx", marker = "extra == 'dev'", specifier = ">=7.2.0" },
- { name = "sphinx", marker = "extra == 'docs'", specifier = ">=7.2.0" },
- { name = "sphinx-autoapi", marker = "extra == 'docs'", specifier =
">=3.0.0" },
- { name = "sphinx-rtd-theme", marker = "extra == 'dev'", specifier =
">=2.0.0" },
- { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier =
">=2.0.0" },
- { name = "sqlparse", specifier = ">=0.4.4" },
- { name = "starlette", specifier = ">=0.27.0" },
- { name = "structlog", specifier = ">=23.2.0" },
- { name = "toml", specifier = ">=0.10.0" },
- { name = "tox", marker = "extra == 'dev'", specifier = ">=4.11.0" },
- { name = "tqdm", specifier = ">=4.66.0" },
- { name = "typer", specifier = ">=0.9.0" },
- { name = "uvicorn", extras = ["standard"], specifier = ">=0.25.0" },
- { name = "uvloop", marker = "extra == 'performance'", specifier =
">=0.19.0" },
- { name = "websockets", specifier = ">=12.0" },
-]
-provides-extras = ["dev", "docs", "performance", "monitoring"]
-
-[package.metadata.requires-dev]
-dev = [{ name = "ruff", specifier = ">=0.11.13" }]
-
[[package]]
name = "mdit-py-plugins"
version = "0.4.2"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]