Merge remote-tracking branch 'origin/main'

This commit is contained in:
Jerry 2025-05-29 18:44:43 +08:00
commit ccd37ce4e6
37 changed files with 479 additions and 895 deletions

191
src/backend/.gitignore vendored
View File

@ -1,52 +1,7 @@
# Python
__pycache__/
*.py[cod]
*$py.class
.Python
env/
venv/
ENV/
env.bak/
venv.bak/
# IDE
.idea/
.vscode/
*.swp
*.swo
# Logs
*.log
logs/
# Environment variables
.env
.env.local
.env.development
.env.test
.env.production
# Docker
docker-compose.override.yml
# Test
.coverage
htmlcov/
.pytest_cache/
# Build
dist/
build/
*.egg-info/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
@ -60,155 +15,23 @@ parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# IDE
.idea/
.vscode/
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
# Environment
.env
.venv
.venv/
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
# Logs
*.log

View File

@ -1,22 +1,15 @@
version: '3.13.2'
FROM python:3.13-slim
services:
app:
build: .
ports:
- "8000:8000"
depends_on:
- redis
environment:
- REDIS_URL=redis://redis:6379
WORKDIR /app
redis:
image: redis:alpine
ports:
- "6379:6379"
COPY ./requirements.txt /app/requirements.txt
worker:
build: .
command: celery -A app.services.task_service worker --loglevel=info
depends_on:
- ONBUILD
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
COPY . /app
ENV PYTHONPATH=/app
ENV PORT=8000
ENV HOST=0.0.0.0
CMD ["uvicorn", "run:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]

1
src/backend/__init__.py Normal file
View File

@ -0,0 +1 @@
#empty

View File

@ -0,0 +1,23 @@
from fastapi import FastAPI
from src.backend.app.api.endpoints import router as api_router
from src.backend.app.utils.logger import setup_logging
from src.backend.config import settings
def create_app() -> FastAPI:
    """Build and return the configured FastAPI application.

    Logging is configured first so every subsequent log record is routed
    correctly; doc endpoints are rooted under the configured API prefix.
    """
    setup_logging()

    prefix = settings.API_PREFIX
    application = FastAPI(
        title=settings.APP_NAME,
        debug=settings.DEBUG,
        docs_url=f"{prefix}/docs",
        redoc_url=f"{prefix}/redoc",
        openapi_url=f"{prefix}/openapi.json",
    )
    # Mount all API routes under the same prefix.
    application.include_router(api_router, prefix=prefix)
    return application

View File

@ -1,29 +0,0 @@
from .base import BaseAdapter
from .cisco import CiscoAdapter
from .huawei import HuaweiAdapter
from .factory import AdapterFactory
# Auto-register every available adapter class (vendor name -> class).
__all_adapters__ = {
    'cisco': CiscoAdapter,
    'huawei': HuaweiAdapter
}
def get_supported_vendors() -> list:
    """Return the list of device vendor names currently supported."""
    return list(__all_adapters__.keys())
def init_adapters():
    """Register all known adapter classes with the AdapterFactory."""
    AdapterFactory.register_adapters(__all_adapters__)
# Registration happens automatically when this package is imported.
init_adapters()
__all__ = [
    'BaseAdapter',
    'CiscoAdapter',
    'HuaweiAdapter',
    'AdapterFactory',
    'get_supported_vendors'
]

View File

@ -1,16 +0,0 @@
# /backend/app/adapters/base.py
from abc import ABC, abstractmethod
from typing import Dict, Any
class BaseAdapter(ABC):
    """Common contract every vendor device adapter must implement."""

    @abstractmethod
    async def connect(self, ip: str, credentials: Dict[str, str]):
        """Establish a session with the device at *ip* using *credentials*."""

    @abstractmethod
    async def deploy_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """Push *config* to the device and return the result payload."""

    @abstractmethod
    async def get_status(self) -> Dict[str, Any]:
        """Return the device's current status."""

View File

@ -1,32 +0,0 @@
# /backend/app/adapters/cisco.py
from typing import Any, Dict

from netmiko import ConnectHandler

from .base import BaseAdapter
class CiscoAdapter(BaseAdapter):
    """Adapter that pushes configuration to Cisco IOS devices via netmiko."""

    def __init__(self):
        # Created lazily in connect(); None means "not connected yet".
        self.connection = None

    async def connect(self, ip: str, credentials: Dict[str, str]):
        """Open an SSH session to the device at *ip*.

        NOTE(review): ConnectHandler is a blocking call executed directly in
        this coroutine — consider run_in_executor; confirm intended behavior.
        """
        self.connection = ConnectHandler(
            device_type='cisco_ios',
            host=ip,
            username=credentials['username'],
            password=credentials['password'],
            timeout=10
        )

    async def deploy_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """Render *config* into IOS commands and push them to the device."""
        commands = self._generate_commands(config)
        output = self.connection.send_config_set(commands)
        return {'success': True, 'output': output}

    async def disconnect(self):
        """Close the SSH session if one is open.

        Added for parity with HuaweiAdapter.disconnect(): callers such as
        batch deployment invoke disconnect() on every adapter in a finally
        block, which previously raised AttributeError on Cisco devices.
        """
        if self.connection:
            self.connection.disconnect()
            self.connection = None

    def _generate_commands(self, config: Dict[str, Any]) -> list:
        # In production this should use Jinja2 templates instead of
        # hand-building command strings.
        commands = []
        if 'vlans' in config:
            for vlan in config['vlans']:
                commands.extend([
                    f"vlan {vlan['id']}",
                    f"name {vlan['name']}"
                ])
        return commands

View File

@ -1,21 +0,0 @@
from . import BaseAdapter
from .cisco import CiscoAdapter
from .huawei import HuaweiAdapter
class AdapterFactory:
    """Registry mapping vendor names to adapter classes.

    Populated at startup via register_adapters() (see adapters/__init__.py);
    get_adapter() instantiates a fresh adapter per call.
    """

    _adapters: dict = {}

    @classmethod
    def register_adapters(cls, adapters: dict):
        """Merge a {vendor_name: adapter_class} mapping into the registry."""
        cls._adapters.update(adapters)

    @classmethod
    def get_adapter(cls, vendor: str) -> "BaseAdapter":
        """Return a new adapter instance for *vendor*.

        Bug fix: the original @classmethod was declared as
        get_adapter(vendor) with no *cls* parameter, so the class object was
        bound to *vendor* and the body's `cls` reference raised NameError.
        The hard-coded local `adapters` dict that shadowed the registry was
        dead code and has been removed.

        Raises:
            ValueError: if *vendor* has not been registered.
        """
        if vendor not in cls._adapters:
            raise ValueError(f"Unsupported vendor: {vendor}")
        return cls._adapters[vendor]()

View File

@ -1,26 +0,0 @@
import httpx
from .base import BaseAdapter
class HuaweiAdapter(BaseAdapter):
    """Adapter that drives Huawei devices over their RESTCONF HTTP API."""

    def __init__(self):
        # Both attributes are populated by connect().
        self.client = None
        self.base_url = None

    async def connect(self, ip: str, credentials: dict):
        """Create an authenticated HTTPS client for the device at *ip*."""
        self.base_url = f"https://{ip}/restconf"
        # NOTE: verify=False disables TLS certificate validation.
        self.client = httpx.AsyncClient(
            auth=(credentials['username'], credentials['password']),
            verify=False,
            timeout=30.0
        )

    async def deploy_config(self, config: dict):
        """POST *config* to the RESTCONF deploy operation; return the JSON reply."""
        url = f"{self.base_url}/data/ietf-restconf:operations/network-topology:deploy"
        headers = {"Content-Type": "application/yang-data+json"}
        response = await self.client.post(url, json=config, headers=headers)
        response.raise_for_status()
        return response.json()

    async def disconnect(self):
        """Dispose of the HTTP client, if one was created."""
        if self.client:
            await self.client.aclose()

View File

@ -1,7 +1,4 @@
from fastapi import APIRouter
from src.backend.app.api.command_parser import router as command_router
from src.backend.app.api.network_config import router as config_router
from .endpoints import router
router = APIRouter()
router.include_router(command_router, prefix="/parse_command", tags=["Command Parsing"])
router.include_router(config_router, prefix="/apply_config", tags=["Configuration"])
__all__ = ["router"]

View File

@ -1,40 +0,0 @@
from fastapi import APIRouter, HTTPException, BackgroundTasks
from pydantic import BaseModel
from typing import List
from app.services.batch import BatchService
from app.utils.decorators import async_retry
router = APIRouter()
class BulkDeviceConfig(BaseModel):
    # Management IPs of every device in the batch.
    device_ips: List[str]
    # Vendor-neutral configuration payload (e.g. {"vlans": [...]}).
    config: dict
    # Credentials shared by all devices in the batch.
    credentials: dict
    vendor: str = "cisco"
    timeout: int = 30
@router.post("/config")
@async_retry(max_attempts=3, delay=1)
async def bulk_apply_config(request: BulkDeviceConfig, bg_tasks: BackgroundTasks):
    """
    Apply one configuration to a batch of devices in the background.

    Example request body:
    {
        "device_ips": ["192.168.1.1", "192.168.1.2"],
        "config": {"vlans": [{"id": 100, "name": "test"}]},
        "credentials": {"username": "admin", "password": "secret"},
        "vendor": "cisco"
    }
    """
    # Fan the shared credentials/vendor out into one descriptor per device.
    devices = [{
        "ip": ip,
        "credentials": request.credentials,
        "vendor": request.vendor
    } for ip in request.device_ips]
    try:
        batch = BatchService()
        # Deployment runs after the response is sent; the client gets an
        # immediate acknowledgement with the device count.
        bg_tasks.add_task(batch.deploy_batch, devices, request.config)
        return {"message": "Batch job started", "device_count": len(devices)}
    except Exception as e:
        raise HTTPException(500, detail=str(e))

View File

@ -1,68 +1,69 @@
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
from typing import Dict, Any
from src.backend.app.services.ai_services import AIService
from src.backend.config import settings
from src.backend.app.services.ai_service import call_ai_api
import logging
router = APIRouter()
logger = logging.getLogger(__name__)
class CommandRequest(BaseModel):
command: str
device_type: Optional[str] = "switch"
vendor: Optional[str] = "cisco"
class CommandParser:
def __init__(self):
self.ai_service = AIService(settings.SILICONFLOW_API_KEY, settings.SILICONFLOW_API_URL)
async def parse(self, command: str) -> Dict[str, Any]:
"""
解析中文命令并返回配置
"""
# 首先尝试本地简单解析
local_parsed = self._try_local_parse(command)
if local_parsed:
return local_parsed
class CommandResponse(BaseModel):
original_command: str
parsed_config: dict
success: bool
message: Optional[str] = None
# 本地无法解析则调用AI服务
return await self.ai_service.parse_command(command)
def _try_local_parse(self, command: str) -> Dict[str, Any]:
"""
尝试本地解析常见命令
"""
command = command.lower().strip()
@router.post("", response_model=CommandResponse)
async def parse_command(request: CommandRequest):
"""
解析中文网络配置命令返回JSON格式的配置
# VLAN配置
if "vlan" in command and "创建" in command:
parts = command.split()
vlan_id = next((p for p in parts if p.isdigit()), None)
if vlan_id:
return {
"type": "vlan",
"vlan_id": int(vlan_id),
"name": f"VLAN{vlan_id}",
"interfaces": []
}
参数:
- command: 中文配置命令"创建VLAN 100名称为财务部"
- device_type: 设备类型默认为switch
- vendor: 设备厂商默认为cisco
# 接口配置
if any(word in command for word in ["接口", "端口"]) and any(
word in command for word in ["启用", "关闭", "配置"]):
parts = command.split()
interface = next((p for p in parts if p.startswith(("gi", "fa", "te", "eth"))), None)
if interface:
config = {
"type": "interface",
"interface": interface,
"state": "up" if "启用" in command else "down"
}
返回:
- 解析后的JSON配置
"""
try:
logger.info(f"Received command: {request.command}")
if "ip" in command and "地址" in command:
ip_part = next((p for p in parts if "." in p and p.count(".") == 3), None)
if ip_part:
config["ip_address"] = ip_part
# 调用AI服务解析命令
ai_response = await call_ai_api(
command=request.command,
device_type=request.device_type,
vendor=request.vendor,
api_key=settings.ai_api_key
)
if "描述" in command:
desc_start = command.find("描述") + 2
description = command[desc_start:].strip()
config["description"] = description
if not ai_response.get("success"):
raise HTTPException(
status_code=400,
detail=ai_response.get("message", "Failed to parse command")
)
if "vlan" in command:
vlan_id = next((p for p in parts if p.isdigit()), None)
if vlan_id:
config["vlan"] = int(vlan_id)
return CommandResponse(
original_command=request.command,
parsed_config=ai_response["config"],
success=True,
message="Command parsed successfully"
)
return config
except Exception as e:
logger.error(f"Error parsing command: {str(e)}")
raise HTTPException(
status_code=500,
detail=f"Error processing command: {str(e)}"
)
return None

View File

@ -0,0 +1,50 @@
from fastapi import APIRouter, Depends, HTTPException
from typing import Any
from pydantic import BaseModel
from src.backend.app.services.ai_services import AIService
from src.backend.app.api.network_config import SwitchConfigurator
from src.backend.config import settings
router = APIRouter()
class CommandRequest(BaseModel):
    # Natural-language (Chinese) configuration command to parse.
    command: str
class ConfigRequest(BaseModel):
    # Parsed configuration payload to push to the device.
    config: dict
    # Management IP of the target switch.
    switch_ip: str
@router.post("/parse_command", response_model=dict)
async def parse_command(request: CommandRequest):
    """
    Parse a Chinese-language command into a JSON device configuration.

    Delegates to the SiliconFlow-backed AIService; any failure is surfaced
    as HTTP 400 with the underlying error message.
    """
    try:
        ai_service = AIService(settings.SILICONFLOW_API_KEY, settings.SILICONFLOW_API_URL)
        config = await ai_service.parse_command(request.command)
        return {"success": True, "config": config}
    except Exception as e:
        raise HTTPException(
            status_code=400,
            detail=f"Failed to parse command: {str(e)}"
        )
@router.post("/apply_config", response_model=dict)
async def apply_config(request: ConfigRequest):
    """
    Apply a parsed configuration to the switch at *request.switch_ip*.

    Credentials and timeout come from application settings; failures are
    surfaced as HTTP 500.
    """
    try:
        configurator = SwitchConfigurator(
            username=settings.SWITCH_USERNAME,
            password=settings.SWITCH_PASSWORD,
            timeout=settings.SWITCH_TIMEOUT
        )
        result = await configurator.apply_config(request.switch_ip, request.config)
        return {"success": True, "result": result}
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to apply config: {str(e)}"
        )

View File

@ -1,17 +0,0 @@
from fastapi import APIRouter
from ...monitoring.healthcheck import check_redis, check_ai_service
router = APIRouter()
@router.get("/live")
async def liveness_check():
    """Liveness probe: returns immediately if the process is running."""
    return {"status": "alive"}
@router.get("/ready")
async def readiness_check():
    """Readiness probe: reports reachability of Redis and the AI service."""
    redis_ok = await check_redis()
    ai_ok = await check_ai_service()
    return {
        "redis": redis_ok,
        "ai_service": ai_ok
    }

View File

@ -1,84 +1,174 @@
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
import logging
import requests
router = APIRouter()
logger = logging.getLogger(__name__)
import paramiko
import asyncio
from typing import Dict, Any
from ..utils.exceptions import SwitchConfigException
class ConfigRequest(BaseModel):
config: dict
device_ip: str
credentials: dict
dry_run: Optional[bool] = True
class SwitchConfigurator:
def __init__(self, username: str, password: str, timeout: int = 10):
self.username = username
self.password = password
self.timeout = timeout
async def apply_config(self, switch_ip: str, config: Dict[str, Any]) -> str:
"""
应用配置到交换机
"""
try:
# 根据配置类型调用不同的方法
config_type = config.get("type", "").lower()
class ConfigResponse(BaseModel):
success: bool
message: str
applied_config: Optional[dict] = None
device_response: Optional[str] = None
if config_type == "vlan":
return await self._configure_vlan(switch_ip, config)
elif config_type == "interface":
return await self._configure_interface(switch_ip, config)
elif config_type == "acl":
return await self._configure_acl(switch_ip, config)
elif config_type == "route":
return await self._configure_route(switch_ip, config)
else:
raise SwitchConfigException(f"Unsupported config type: {config_type}")
except Exception as e:
raise SwitchConfigException(str(e))
async def _send_commands(self, switch_ip: str, commands: list) -> str:
"""
发送命令到交换机
"""
try:
# 使用Paramiko建立SSH连接
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
@router.post("", response_model=ConfigResponse)
async def apply_config(request: ConfigRequest):
"""
将生成的配置应用到网络设备
参数:
- config: 生成的JSON配置
- device_ip: 目标设备IP地址
- credentials: 设备登录凭证 {username: str, password: str}
- dry_run: 是否仅测试而不实际应用默认为True
返回:
- 应用结果和设备响应
"""
try:
logger.info(f"Applying config to device {request.device_ip}")
# 这里应该是实际与交换机交互的逻辑
# 由于不同厂商设备交互方式不同,这里只是一个示例
if request.dry_run:
logger.info("Dry run mode - not actually applying config")
return ConfigResponse(
success=True,
message="Dry run successful - config not applied",
applied_config=request.config
# 在异步上下文中运行阻塞操作
loop = asyncio.get_event_loop()
await loop.run_in_executor(
None,
lambda: ssh.connect(
switch_ip,
username=self.username,
password=self.password,
timeout=self.timeout
)
)
# 模拟与设备交互
device_response = simulate_device_interaction(
request.device_ip,
request.credentials,
request.config
)
# 获取SSH shell
shell = await loop.run_in_executor(None, ssh.invoke_shell)
return ConfigResponse(
success=True,
message="Config applied successfully",
applied_config=request.config,
device_response=device_response
)
# 发送配置命令
output = ""
for cmd in commands:
await loop.run_in_executor(None, shell.send, cmd + "\n")
await asyncio.sleep(0.5)
while shell.recv_ready():
output += (await loop.run_in_executor(None, shell.recv, 1024)).decode("utf-8")
except Exception as e:
logger.error(f"Error applying config: {str(e)}")
raise HTTPException(
status_code=500,
detail=f"Error applying config: {str(e)}"
)
# 关闭连接
await loop.run_in_executor(None, ssh.close)
return output
except Exception as e:
raise SwitchConfigException(f"SSH connection failed: {str(e)}")
def simulate_device_interaction(device_ip: str, credentials: dict, config: dict) -> str:
"""
模拟与网络设备的交互
async def _configure_vlan(self, switch_ip: str, config: Dict[str, Any]) -> str:
"""
配置VLAN
"""
vlan_id = config["vlan_id"]
vlan_name = config.get("name", f"VLAN{vlan_id}")
interfaces = config.get("interfaces", [])
在实际实现中这里会使用netmikoparamiko或厂商特定的SDK
与设备建立连接并推送配置
"""
# 这里只是一个模拟实现
return f"Config applied to {device_ip} successfully. {len(config)} commands executed."
commands = [
"configure terminal",
f"vlan {vlan_id}",
f"name {vlan_name}",
]
# 配置接口
for intf in interfaces:
commands.extend([
f"interface {intf['interface']}",
f"switchport access vlan {vlan_id}",
"exit"
])
commands.append("end")
return await self._send_commands(switch_ip, commands)
async def _configure_interface(self, switch_ip: str, config: Dict[str, Any]) -> str:
"""
配置接口
"""
interface = config["interface"]
ip_address = config.get("ip_address")
description = config.get("description", "")
vlan = config.get("vlan")
state = config.get("state", "up")
commands = [
"configure terminal",
f"interface {interface}",
f"description {description}",
]
if ip_address:
commands.append(f"ip address {ip_address}")
if vlan:
commands.append(f"switchport access vlan {vlan}")
if state.lower() == "up":
commands.append("no shutdown")
else:
commands.append("shutdown")
commands.extend(["exit", "end"])
return await self._send_commands(switch_ip, commands)
async def _configure_acl(self, switch_ip: str, config: Dict[str, Any]) -> str:
"""
配置ACL
"""
acl_id = config["acl_id"]
acl_type = config.get("type", "standard")
rules = config.get("rules", [])
commands = ["configure terminal"]
if acl_type == "standard":
commands.append(f"access-list {acl_id} standard")
else:
commands.append(f"access-list {acl_id} extended")
for rule in rules:
action = rule.get("action", "permit")
source = rule.get("source", "any")
destination = rule.get("destination", "any")
protocol = rule.get("protocol", "ip")
if acl_type == "standard":
commands.append(f"{action} {source}")
else:
commands.append(f"{action} {protocol} {source} {destination}")
commands.append("end")
return await self._send_commands(switch_ip, commands)
async def _configure_route(self, switch_ip: str, config: Dict[str, Any]) -> str:
"""
配置路由
"""
network = config["network"]
mask = config["mask"]
next_hop = config["next_hop"]
commands = [
"configure terminal",
f"ip route {network} {mask} {next_hop}",
"end"
]
return await self._send_commands(switch_ip, commands)

View File

@ -1,20 +0,0 @@
from fastapi import APIRouter, BackgroundTasks
from pydantic import BaseModel
from ...services.task_service import deploy_to_device
router = APIRouter()
class TopologyRequest(BaseModel):
    # Device descriptors (ip / vendor / credentials dicts) to deploy to.
    devices: list
    # Configuration payload applied to every device.
    config: dict
@router.post("/deploy")
async def deploy_topology(
    request: TopologyRequest,
    bg_tasks: BackgroundTasks
):
    """Queue one Celery deployment task per device and return the task ids.

    NOTE(review): *bg_tasks* is accepted but unused — Celery's .delay()
    already runs asynchronously; confirm whether the parameter can go.
    """
    task_ids = []
    for device in request.devices:
        # .delay() enqueues the Celery task without blocking on the deploy.
        task = deploy_to_device.delay(device, request.config)
        task_ids.append(task.id)
    return {"task_ids": task_ids}

View File

@ -1 +0,0 @@
# 这个文件保持为空用于标识app为一个Python包

View File

@ -1,22 +0,0 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from src.backend.app.api.command_parser import router as api_router
from src.backend.config import settings
app = FastAPI(title=settings.app_name)
# CORS: wide open (all origins, methods, headers).
# NOTE(review): tighten allow_origins before production deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Mount all API routes under /api.
app.include_router(api_router, prefix="/api")
@app.get("/")
async def root():
    """Landing endpoint confirming the API is up."""
    return {"message": "Network Configuration API is running"}

View File

@ -1,10 +1,30 @@
from .device import DeviceCredentials, DeviceInfo
from .topology import TopologyType, DeviceRole, NetworkTopology
# 数据模型模块初始化文件
# 目前项目不使用数据库,此文件保持为空
# 未来如果需要添加数据库模型,可以在这里定义
__all__ = [
'DeviceCredentials',
'DeviceInfo',
'TopologyType',
'DeviceRole',
'NetworkTopology'
]
from pydantic import BaseModel
from typing import Optional
# Example: generic API response envelope.
class BaseResponse(BaseModel):
    success: bool
    message: Optional[str] = None
    data: Optional[dict] = None
# Example: switch connection details (if switch inventory is ever stored).
class SwitchInfo(BaseModel):
    ip: str
    username: str
    password: str
    model: Optional[str] = None
    description: Optional[str] = None
# Example: one configuration-history record.
class ConfigHistory(BaseModel):
    command: str
    config: dict
    # Unix timestamp of when the configuration was applied.
    timestamp: float
    status: str  # "success" or "failed"
    error: Optional[str] = None
__all__ = ["BaseResponse", "SwitchInfo", "ConfigHistory"]

View File

@ -1,14 +0,0 @@
from pydantic import BaseModel
from typing import Optional
class DeviceCredentials(BaseModel):
    """Login credentials for a network device."""
    username: str
    password: str
    # Privileged-exec ("enable") password; not all platforms require one.
    enable_password: Optional[str] = None
class DeviceInfo(BaseModel):
    """Identity and access details for one managed device."""
    ip: str
    vendor: str
    model: Optional[str] = None
    os_version: Optional[str] = None
    credentials: DeviceCredentials

View File

@ -1,20 +0,0 @@
# Topology data structures.
from enum import Enum
from typing import Dict, List
from pydantic import BaseModel
class TopologyType(str, Enum):
    """Supported physical topology layouts."""
    SPINE_LEAF = "spine-leaf"
    CORE_ACCESS = "core-access"
    RING = "ring"
class DeviceRole(str, Enum):
    """Role a device plays within a topology."""
    CORE = "core"
    SPINE = "spine"
    LEAF = "leaf"
    ACCESS = "access"
class NetworkTopology(BaseModel):
    """A topology: layout type, devices grouped by role, and their links."""
    type: TopologyType
    # Device identifiers grouped by role.
    devices: Dict[DeviceRole, List[str]]
    # Adjacency map: device id -> list of connected device ids.
    links: Dict[str, List[str]]

View File

@ -1,42 +0,0 @@
from prometheus_client import (
Counter,
Gauge,
Histogram,
Summary
)
# API metrics: request counts by method/endpoint/status and latency histogram.
API_REQUESTS = Counter(
    'api_requests_total',
    'Total API requests',
    ['method', 'endpoint', 'status']
)
API_LATENCY = Histogram(
    'api_request_latency_seconds',
    'API request latency',
    ['endpoint']
)
# Device metrics: live connection gauge per vendor, config-apply timing.
DEVICE_CONNECTIONS = Gauge(
    'network_device_connections',
    'Active device connections',
    ['vendor']
)
CONFIG_APPLY_TIME = Summary(
    'config_apply_seconds',
    'Time spent applying configurations'
)
# Error metrics, labelled by error category (e.g. "connection").
CONFIG_ERRORS = Counter(
    'config_errors_total',
    'Configuration errors',
    ['error_type']
)
def observe_api_request(method: str, endpoint: str, status: int, duration: float):
    """Record one completed API request: bump the counter, observe latency."""
    API_REQUESTS.labels(method, endpoint, status).inc()
    API_LATENCY.labels(endpoint).observe(duration)

View File

@ -1,31 +0,0 @@
from prometheus_client import Counter, Histogram
from fastapi import Request
REQUESTS = Counter(
'api_requests_total',
'Total API Requests',
['method', 'endpoint']
)
LATENCY = Histogram(
'api_request_latency_seconds',
'API Request Latency',
['endpoint']
)
async def monitor_requests(request: Request, call_next):
    """FastAPI middleware: count each request and record its latency.

    Increments REQUESTS (method, path) and observes the wall-clock latency
    in LATENCY (path) for every request that passes through.

    Bug fix: the original referenced time.time() but this module never
    imported `time`, so the first request raised NameError. The import is
    local because the module-level import block is outside this edit.
    """
    import time

    start_time = time.time()
    response = await call_next(request)
    latency = time.time() - start_time

    REQUESTS.labels(
        method=request.method,
        endpoint=request.url.path
    ).inc()
    LATENCY.labels(
        endpoint=request.url.path
    ).observe(latency)
    return response

View File

@ -1,16 +0,0 @@
from .task_service import celery_app
from .ai_service import AIService
from .topology import TopologyService
from .batch import BatchService
# Singleton service instances shared across the application.
# NOTE(review): AIService is constructed here with no arguments, but its
# definition elsewhere takes (api_key, api_url) — confirm which signature
# is current before relying on this module.
ai_service = AIService()
topology_service = TopologyService()
batch_service = BatchService()
__all__ = [
    'celery_app',
    'ai_service',
    'topology_service',
    'batch_service'
]

View File

@ -1,59 +1,61 @@
import aiohttp
import logging
import json
import httpx
from typing import Dict, Any
from src.backend.config import settings
logger = logging.getLogger(__name__)
from src.backend.app.utils.exceptions import SiliconFlowAPIException
async def call_ai_api(command: str, device_type: str, vendor: str, api_key: str) -> Dict[str, Any]:
"""
调用硅基流动API解析中文命令
参数:
- command: 中文配置命令
- device_type: 设备类型
- vendor: 设备厂商
- api_key: API密钥
返回:
- 解析后的配置和状态信息
"""
url = settings.ai_api_url
headers = {
"Authorization": f"Bearer {api_key}",
"Content-Type": "application/json"
}
payload = {
"command": command,
"device_type": device_type,
"vendor": vendor,
"output_format": "json"
}
try:
async with aiohttp.ClientSession() as session:
async with session.post(url, json=payload, headers=headers) as response:
if response.status != 200:
error = await response.text()
logger.error(f"AI API error: {error}")
return {
"success": False,
"message": f"AI API returned {response.status}: {error}"
}
data = await response.json()
return {
"success": True,
"config": data.get("config", {}),
"message": data.get("message", "Command parsed successfully")
}
except Exception as e:
logger.error(f"Error calling AI API: {str(e)}")
return {
"success": False,
"message": f"Error calling AI API: {str(e)}"
class AIService:
def __init__(self, api_key: str, api_url: str):
self.api_key = api_key
self.api_url = api_url
self.headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json"
}
async def parse_command(self, command: str) -> Dict[str, Any]:
"""
调用硅基流动API解析中文命令
"""
prompt = f"""
你是一个网络设备配置专家请将以下中文命令转换为网络设备配置JSON
支持的配置包括VLAN端口路由ACL等
返回格式必须为JSON包含配置类型和详细参数
命令{command}
"""
data = {
"model": "text-davinci-003",
"prompt": prompt,
"max_tokens": 1000,
"temperature": 0.3
}
try:
async with httpx.AsyncClient() as client:
response = await client.post(
f"{self.api_url}/completions",
headers=self.headers,
json=data,
timeout=30
)
if response.status_code != 200:
raise SiliconFlowAPIException(response.text)
result = response.json()
config_str = result["choices"][0]["text"].strip()
# 确保返回的是有效的JSON
try:
config = json.loads(config_str)
return config
except json.JSONDecodeError:
# 尝试修复可能的多余字符
if config_str.startswith("```json"):
config_str = config_str[7:-3].strip()
return json.loads(config_str)
raise SiliconFlowAPIException("Invalid JSON format returned from AI")
except httpx.HTTPError as e:
raise SiliconFlowAPIException(str(e))

View File

@ -1,48 +0,0 @@
import asyncio
from typing import List, Dict, Any
from app.adapters.factory import AdapterFactory
from app.utils.connection_pool import ConnectionPool
from app.monitoring.metrics import (
DEVICE_CONNECTIONS,
CONFIG_APPLY_TIME,
CONFIG_ERRORS
)
class BatchService:
    """Deploys one configuration to many devices concurrently."""

    def __init__(self, max_workers: int = 10):
        # Caps how many devices are being configured at the same time.
        self.semaphore = asyncio.Semaphore(max_workers)
        self.pool = ConnectionPool()

    @CONFIG_APPLY_TIME.time()
    async def deploy_batch(self, devices: List[Dict], config: Dict[str, Any]):
        """Apply *config* to every device in *devices* concurrently.

        Returns a list of per-device result dicts; unexpected exceptions are
        returned in-place (gather is called with return_exceptions=True).

        Bug fixes vs. the original:
        - `adapter` is initialised to None before the try block; previously a
          failure inside AdapterFactory.get_adapter() made the finally block
          raise NameError, masking the real error.
        - DEVICE_CONNECTIONS is only decremented when it was actually
          incremented (i.e. after a successful connect()).
        """
        async def _deploy(device):
            vendor = device.get('vendor', 'cisco')
            adapter = None      # so the finally block is safe if get_adapter fails
            connected = False   # only decrement the gauge if we incremented it
            async with self.semaphore:
                try:
                    adapter = AdapterFactory.get_adapter(vendor)
                    await adapter.connect(device['ip'], device['credentials'])
                    DEVICE_CONNECTIONS.labels(vendor).inc()
                    connected = True
                    result = await adapter.deploy_config(config)
                    return {
                        "device": device['ip'],
                        "status": "success",
                        "result": result
                    }
                except ConnectionError as e:
                    CONFIG_ERRORS.labels("connection").inc()
                    return {
                        "device": device['ip'],
                        "status": "failed",
                        "error": str(e)
                    }
                finally:
                    if adapter:
                        # Assumes every adapter implements disconnect() —
                        # TODO confirm for all registered vendors.
                        await adapter.disconnect()
                    if connected:
                        DEVICE_CONNECTIONS.labels(vendor).dec()

        return await asyncio.gather(
            *[_deploy(device) for device in devices],
            return_exceptions=True
        )

View File

@ -1,18 +0,0 @@
#Celery任务定义
from celery import Celery
from src.backend.app.utils.connection_pool import ConnectionPool
from src.backend.config import settings
celery = Celery(__name__, broker=settings.REDIS_URL)
pool = ConnectionPool(max_size=settings.MAX_CONNECTIONS)
@celery.task
def deploy_to_device(device_info: dict, config: dict):
    """Celery task: push *config* to a single device.

    Bug fixes vs. the original:
    - The task was declared `async def`; Celery workers do not await
      coroutine functions, so the task body never actually ran. The async
      work is now wrapped in asyncio.run() inside a regular sync task.
    - The adapter was only returned to the pool on the success path; it is
      now released in a finally block so failures do not leak connections.
    """
    import asyncio  # local import: asyncio is not imported at module level here

    async def _run():
        adapter = await pool.get(device_info['vendor'])
        try:
            await adapter.connect(device_info['ip'], device_info['credentials'])
            result = await adapter.deploy_config(config)
            return {'device': device_info['ip'], 'result': result}
        except Exception as e:
            return {'device': device_info['ip'], 'error': str(e)}
        finally:
            # Always hand the adapter back, even when the deploy failed.
            await pool.release(adapter)

    return asyncio.run(_run())

View File

@ -1,23 +0,0 @@
# Topology handling logic.
def generate_multi_device_config(topology):
    """
    Build a per-switch configuration map for a whole topology.

    Example *topology*:
    {
        "core_switches": [sw1, sw2],
        "access_switches": {
            "sw1": [sw3, sw4],
            "sw2": [sw5, sw6]
        }
    }

    Returns a {switch: config} dict.

    NOTE(review): generate_core_config / generate_access_config are not
    defined in this module — confirm where they are expected to come from.
    """
    configs = {}
    # Core-layer configuration (e.g. MSTP root bridge election).
    for sw in topology['core_switches']:
        configs[sw] = generate_core_config(sw)
    # Access-layer configuration (e.g. binding ports to the core uplink).
    for core_sw, access_sws in topology['access_switches'].items():
        for sw in access_sws:
            configs[sw] = generate_access_config(sw, uplink=core_sw)
    return configs

View File

@ -1,22 +0,0 @@
#连接池
# /backend/app/utils/connection_pool.py
import asyncio
from collections import deque
from ..adapters import cisco, huawei
class ConnectionPool:
    """Simple asyncio-guarded pool of reusable device adapter instances."""

    def __init__(self, max_size=10):
        self.max_size = max_size
        # deque(maxlen=...) silently drops the oldest adapter when full.
        self.pool = deque(maxlen=max_size)
        self.lock = asyncio.Lock()

    async def get(self, vendor: str):
        """Pop a pooled adapter, or build a fresh one for *vendor*.

        Bug fix: the original referenced the bare names CiscoAdapter /
        HuaweiAdapter, but this module imports only the *modules*
        (`from ..adapters import cisco, huawei`), so every cache miss
        raised NameError. The classes are now accessed via their modules.

        NOTE(review): pooled adapters are not tagged with their vendor, so
        a reused adapter may not match the requested vendor — confirm intent.
        """
        async with self.lock:
            if self.pool:
                return self.pool.pop()
            return cisco.CiscoAdapter() if vendor == 'cisco' else huawei.HuaweiAdapter()

    async def release(self, adapter):
        """Return *adapter* to the pool unless the pool is already full."""
        async with self.lock:
            if len(self.pool) < self.max_size:
                self.pool.append(adapter)

View File

@ -0,0 +1,22 @@
from fastapi import HTTPException, status
class AICommandParseException(HTTPException):
    """400: the AI service could not parse the user's command."""
    def __init__(self, detail: str):
        super().__init__(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"AI command parse error: {detail}"
        )
class SwitchConfigException(HTTPException):
    """500: applying configuration to the switch failed."""
    def __init__(self, detail: str):
        super().__init__(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Switch configuration error: {detail}"
        )
class SiliconFlowAPIException(HTTPException):
    """503: the upstream SiliconFlow API is unavailable or returned an error."""
    def __init__(self, detail: str):
        super().__init__(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=f"SiliconFlow API error: {detail}"
        )

View File

@ -0,0 +1,33 @@
import logging
from loguru import logger
import sys
class InterceptHandler(logging.Handler):
    """Redirects records emitted via stdlib `logging` into loguru."""
    def emit(self, record):
        # Get corresponding Loguru level if it exists
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            # Unknown level name: fall back to the numeric level.
            level = record.levelno
        # Find caller from where originated the logged message, so loguru
        # reports the real call site rather than this handler.
        frame, depth = logging.currentframe(), 2
        while frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1
        logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
def setup_logging():
    """Route all stdlib logging through loguru with a single stdout sink."""
    # Intercept standard logging; level=0 captures every record.
    logging.basicConfig(handlers=[InterceptHandler()], level=0)
    # Configure the loguru sink and line format.
    logger.configure(
        handlers=[
            {"sink": sys.stdout,
             "format": "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> | <cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>"}
        ]
    )

View File

@ -1,9 +0,0 @@
# Celery settings: Redis serves as both the message broker (db 0) and the
# result backend (db 1).
broker_url = 'redis://redis:6379/0'
result_backend = 'redis://redis:6379/1'
# Serialize task payloads and results as JSON only.
task_serializer = 'json'
result_serializer = 'json'
accept_content = ['json']
# Use UTC timestamps throughout.
timezone = 'UTC'
enable_utc = True
# Report the STARTED state and hard-kill tasks running longer than 5 minutes.
task_track_started = True
task_time_limit = 300

View File

@ -1,17 +1,23 @@
from pydantic_settings import BaseSettings
from pydantic import Field
import os
from pydantic import BaseSettings
class Settings(BaseSettings):
app_name: str = "Network Automation API"
redis_url: str = Field("redis://localhost:6379", env="REDIS_URL")
ai_api_key: str = Field(..., env="AI_API_KEY")
max_connections: int = Field(50, env="MAX_CONNECTIONS")
default_timeout: int = Field(30, env="DEFAULT_TIMEOUT")
APP_NAME: str = "AI Network Configurator"
DEBUG: bool = True
API_PREFIX: str = "/api"
# 硅基流动API配置
SILICONFLOW_API_KEY: str = os.getenv("SILICONFLOW_API_KEY", "")
SILICONFLOW_API_URL: str = os.getenv("SILICONFLOW_API_URL", "https://api.siliconflow.ai/v1")
# 交换机配置
SWITCH_USERNAME: str = os.getenv("SWITCH_USERNAME", "admin")
SWITCH_PASSWORD: str = os.getenv("SWITCH_PASSWORD", "admin")
SWITCH_TIMEOUT: int = os.getenv("SWITCH_TIMEOUT", 10)
class Config:
env_file = ".env"
extra = "ignore"
settings = Settings()

View File

@ -1,11 +0,0 @@
class InvalidInputError(Exception):
"""输入验证异常"""
pass
class NetworkDeviceError(Exception):
"""网络设备通信异常"""
pass
class AIServiceError(Exception):
"""AI服务异常"""
pass

View File

@ -1,8 +1,7 @@
fastapi==0.109.1
uvicorn==0.27.0
fastapi==0.95.2
uvicorn==0.22.0
python-dotenv==1.0.0
celery==5.3.6
redis==4.6.0
netmiko==4.2.0
asyncssh==2.14.10.0
prometheus-client==0.2
requests==2.28.2
paramiko==3.1.0
pydantic==1.10.7
loguru==0.7.0

View File

@ -1,11 +1,13 @@
import uvicorn
from app.main import app
from src.backend.app import create_app
app = create_app()
if __name__ == "__main__":
uvicorn.run(
"app.main:app",
app,
host="0.0.0.0",
port=8000,
reload=True,
workers=1
log_level="info",
reload=True
)