mirror of
https://github.com/EasyTier/EasyTier.git
synced 2026-05-07 10:14:35 +00:00
refactor(rpc): Centralize RPC service and unify API (#1427)
This change introduces a major refactoring of the RPC service layer to improve modularity, unify the API, and simplify the overall architecture. Key changes: - Replaced per-network-instance RPC services with a single global RPC server, reducing resource usage and simplifying management. - All clients (CLI, Web UI, etc.) now interact with EasyTier core through a unified RPC entrypoint, enabling consistent authentication and control. - RPC implementation logic has been moved to `easytier/src/rpc_service/` and organized by functionality (e.g., `instance_manage.rs`, `peer_manage.rs`, `config.rs`) for better maintainability. - Standardized Protobuf API definitions under `easytier/src/proto/` with an `api_` prefix (e.g., `cli.proto` → `api_instance.proto`) to provide a consistent interface. - CLI commands now require explicit `--instance-id` or `--instance-name` when multiple network instances are running; the parameter is optional when only one instance exists. BREAKING CHANGE: RPC portal configuration (`rpc_portal` and `rpc_portal_whitelist`) has been removed from per-instance configs and the Web UI. The RPC listen address must now be specified globally via the `--rpc-portal` command-line flag or the `ET_RPC_PORTAL` environment variable, as there is only one RPC service for the entire application.
This commit is contained in:
@@ -0,0 +1,108 @@
|
||||
mod acl_manage;
|
||||
mod api;
|
||||
mod config;
|
||||
mod connector_manage;
|
||||
mod mapped_listener_manage;
|
||||
mod peer_manage;
|
||||
mod port_forward_manage;
|
||||
mod proxy;
|
||||
mod stats;
|
||||
mod vpn_portal;
|
||||
|
||||
pub mod instance_manage;
|
||||
pub mod logger;
|
||||
|
||||
/// Re-export of the concrete RPC server type defined in the private `api`
/// submodule, exposed at the `rpc_service` root so callers outside this
/// module can name it without reaching into `api`.
pub type ApiRpcServer = self::api::ApiRpcServer;
|
||||
|
||||
pub trait InstanceRpcService: Sync + Send {
|
||||
fn get_peer_manage_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::PeerManageRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_connector_manage_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::ConnectorManageRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_mapped_listener_manage_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::MappedListenerManageRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_vpn_portal_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::VpnPortalRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_proxy_service(
|
||||
&self,
|
||||
client_type: &str,
|
||||
) -> Option<
|
||||
std::sync::Arc<
|
||||
dyn crate::proto::api::instance::TcpProxyRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
> + Send
|
||||
+ Sync,
|
||||
>,
|
||||
>;
|
||||
fn get_acl_manage_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::AclManageRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_port_forward_manage_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::PortForwardManageRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_stats_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::instance::StatsRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
fn get_config_service(
|
||||
&self,
|
||||
) -> &dyn crate::proto::api::config::ConfigRpc<
|
||||
Controller = crate::proto::rpc_types::controller::BaseController,
|
||||
>;
|
||||
}
|
||||
|
||||
fn get_instance_service(
|
||||
instance_manager: &std::sync::Arc<crate::instance_manager::NetworkInstanceManager>,
|
||||
identifier: &Option<crate::proto::api::instance::InstanceIdentifier>,
|
||||
) -> Result<std::sync::Arc<dyn InstanceRpcService>, anyhow::Error> {
|
||||
use crate::proto::api;
|
||||
let selector = identifier.as_ref().and_then(|s| s.selector.as_ref());
|
||||
|
||||
let id = if let Some(api::instance::instance_identifier::Selector::Id(id)) = selector {
|
||||
(*id).into()
|
||||
} else {
|
||||
let ids = instance_manager.filter_network_instance(|_, i| {
|
||||
if let Some(api::instance::instance_identifier::Selector::InstanceSelector(selector)) =
|
||||
selector
|
||||
{
|
||||
if let Some(name) = selector.name.as_ref() {
|
||||
if i.get_inst_name() != *name {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
true
|
||||
});
|
||||
match ids.len() {
|
||||
0 => return Err(anyhow::anyhow!("No instance matches the selector")),
|
||||
1 => ids[0],
|
||||
_ => {
|
||||
return Err(anyhow::anyhow!(
|
||||
"{} instances match the selector, please specify the instance ID",
|
||||
ids.len()
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
instance_manager
|
||||
.get_instance_service(&id)
|
||||
.ok_or_else(|| anyhow::anyhow!("Instance not found or API service not available"))
|
||||
}
|
||||
Reference in New Issue
Block a user