目前动态数据源注解只对mapper.java做了适配,现在需要优化完善,下面是具体要求,和已有代码 ...

생성일: 2026년 1월 28일

답변에 사용된 모델 GPT-5.2 Thinking by Chat01

질문

目前动态数据源注解只对mapper.java做了适配,现在需要优化完善,下面是具体要求,和已有代码
0、可以针对 自动注入的mapper字段适配 和 mapper.java中的具体方法适配,以及目前已有的mapper类注解的适配。
1、数据源的优先级 字段 > 方法 > 类,
2、在service中对多个字段进行标注@TargetDataSource,其中的方法使用多个不同数据源,那么表示这个方法需要调用不同数据源来完成业务操作。
3、如果AService的A方法分别调用BService的B方法和CService的C方法,其中B和C用自动注入同一个mapper的方法,但不是同一个数据源@TargetDataSource,会导致运行出错么?如何解决

自定义注解

java
package com.wiscom.basic.config.datasource; import java.lang.annotation.*; @Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface TargetDataSource { String name() default ""; }

数据源切换和注解解析

java
package com.wiscom.basic.config.datasource; import lombok.extern.slf4j.Slf4j; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Pointcut; import org.aspectj.lang.reflect.MethodSignature; import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import java.util.Objects; @Component @Aspect @Slf4j @Order(-100) public class TargetDataSourceAspect { /** * 切入点:所有dao包下接口文件 */ @Pointcut("execution(* com.wiscom..*.service..*.*(..)) || execution(* com.wiscom..*.dao..*.*(..)) || execution(* com.wiscom..*.mapper..*.*(..))") private void dsPointCut() { } /** * 设置具体数据源 * * @param joinPoint 切点 * @return Object * @throws Throwable 异常 */ @Around("dsPointCut()") public Object around(ProceedingJoinPoint joinPoint) throws Throwable { // 获取当前指定的获取mapper接口数据源 MethodSignature ms = (MethodSignature) joinPoint.getSignature(); Class<?> targetInterface = ms.getDeclaringType(); TargetDataSource dataSource = targetInterface.getAnnotation(TargetDataSource.class); /* 没有匹配到的话,使用默认数据源,否则设置到动态数据源上下文中 */ if (Objects.isNull(dataSource)) { DynamicDataSource.setDataSource("elss"); log.debug("使用默认数据源"); } else { DynamicDataSource.setDataSource(dataSource.name()); log.debug("匹配到数据源:{}", dataSource.name()); } try { return joinPoint.proceed(); } finally { /* 方法执行完毕之后,销毁当前数据源信息,进行垃圾回收 */ DynamicDataSource.clearDataSource(); log.debug("当前数据源已清空"); } } }

其他方法

java
package com.wiscom.basic.config.datasource; import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource; import javax.sql.DataSource; import java.util.Map; public class DynamicDataSource extends AbstractRoutingDataSource { private static final ThreadLocal<String> DB_CONTEXT_HOLDER = new ThreadLocal<>(); /** * 取得当前使用的数据源 * * @return 当前使用的数据源 */ @Override protected Object determineCurrentLookupKey() { return getDataSource(); } /** * 设置数据源 * * @param dataSource 数据源 */ public static void setDataSource(String dataSource) { DB_CONTEXT_HOLDER.set(dataSource); } /** * 获取当前数据源 * * @return 数据源 */ public static String getDataSource() { return DB_CONTEXT_HOLDER.get(); } /** * 清除上下文 */ public static void clearDataSource() { DB_CONTEXT_HOLDER.remove(); } /** * 设置默认数据源,和可切换的数据源Map * * @param defaultTargetDataSource 默认数据源 * @param targetDataSources 可切换的数据源Map */ public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) { super.setDefaultTargetDataSource(defaultTargetDataSource); super.setTargetDataSources(targetDataSources); super.afterPropertiesSet(); } }
java
package com.wiscom.basic.config.datasource;

import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * Declares the individual Druid data sources and assembles them into the
 * routing DynamicDataSource. Optional sources are only created when their
 * url property is configured.
 * NOTE(review): @Component is redundant next to @Configuration.
 */
@Configuration
@Component
public class DataSourceConfig {

    /**
     * Curtailment system (elss) PostgreSQL data source — always present.
     *
     * @return data source
     */
    @Bean(name = "elss")
    @ConfigurationProperties(prefix = "spring.datasource.elss")
    public DataSource pgsqlSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Centrally-dispatched curtailment data source (optional).
     * NOTE(review): the bean name uses an underscore ("elss_centrally") while
     * the routing map below registers the key "elss-centrally" — annotations
     * must use the MAP key, not the bean name. Consider unifying the two.
     *
     * @return data source
     */
    @Bean(name = "elss_centrally")
    @ConfigurationProperties(prefix = "spring.datasource.elss-centrally")
    @ConditionalOnProperty(prefix = "spring.datasource.elss-centrally", name = "url")
    public DataSource pgsqlElssCentrallySource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Visualization system PostgreSQL data source (optional).
     *
     * @return data source
     */
    @Bean(name = "envs")
    @ConfigurationProperties(prefix = "spring.datasource.envs")
    @ConditionalOnProperty(prefix = "spring.datasource.envs", name = "url")
    public DataSource pgsqlEnvsSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * "One-plane" sync PostgreSQL data source (optional).
     *
     * @return data source
     */
    @Bean(name = "ypm-sync")
    @ConfigurationProperties(prefix = "spring.datasource.ypm-sync")
    @ConditionalOnProperty(prefix = "spring.datasource.ypm-sync", name = "url")
    public DataSource pgsqlYpmSyncSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Automation next-generation DM7 data source (optional).
     *
     * @return data source
     */
    @Bean(name = "auto-dm")
    @ConfigurationProperties(prefix = "spring.datasource.auto-dm")
    @ConditionalOnProperty(prefix = "spring.datasource.auto-dm", name = "url")
    public DataSource autoDmSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Spot-desk DM8 data source (optional).
     *
     * @return data source
     */
    @Bean(name = "ddc-dm")
    @ConfigurationProperties(prefix = "spring.datasource.ddc-dm")
    @ConditionalOnProperty(prefix = "spring.datasource.ddc-dm", name = "url")
    public DataSource ddcDmSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Primary routing data source; "elss" doubles as the default target.
     *
     * @param elss          elss data source (mandatory)
     * @param envs          envs data source
     * @param ypmSync       visualization ypm sync database
     * @param autoDm        automation DM database
     * @param ddcDm         dispatch DM database
     * @param elssCentrally centrally-dispatched curtailment database
     * @return the dynamic routing data source
     */
    @Bean
    @Primary
    public DataSource dataSource(@Qualifier("elss") DataSource elss,
                                 @Autowired(required = false) @Qualifier("envs") DataSource envs,
                                 @Autowired(required = false) @Qualifier("ypm-sync") DataSource ypmSync,
                                 @Autowired(required = false) @Qualifier("auto-dm") DataSource autoDm,
                                 @Autowired(required = false) @Qualifier("ddc-dm") DataSource ddcDm,
                                 @Autowired(required = false) @Qualifier("elss_centrally") DataSource elssCentrally) {
        Map<Object, Object> dsMap = new HashMap<>(16);
        dsMap.put("elss", elss);
        if (envs != null) {
            dsMap.put("envs", envs);
        }
        if (ypmSync != null) {
            dsMap.put("ypm-sync", ypmSync);
        }
        if (autoDm != null) {
            dsMap.put("auto-dm", autoDm);
        }
        if (ddcDm != null) {
            dsMap.put("ddc-dm", ddcDm);
        }
        if (elssCentrally != null) {
            dsMap.put("elss-centrally", elssCentrally);
        }
        return new DynamicDataSource(elss, dsMap);
    }
}

사고

Evaluating datasource resolution for mappers

I’m trying to figure out how to switch data sources for mappers in a service. The challenge is that annotations like @TargetDataSource need to be applied to individual fields in the service class to ensure each mapper knows which data source to use. It gets tricky because the same mapper interface might be injected into different fields, making it hard to track the data source dynamically.

Exploring dynamic datasource handling with proxies and annotations

The challenge is ensuring mappers injected into multiple services can work with different data sources. One idea is to adapt auto-injected mappers by wrapping them in a proxy that sets the correct datasource, based on the field. Another option is using AOP, but the requirement insists on field annotations taking priority over method annotations.

Streamlining datasource management with ThreadLocal stack

The simplest approach might be to implement @TargetDataSource on the mapper interface, method, and injected field, but this only works for using different data sources with the same mapper in different services. Another option is modifying DynamicDataSource to use a ThreadLocal stack, so nested service calls restore the previous data source when switching back.

Implementing dynamic datasource resolution for mappers

I’m considering a stack-based approach. First, if a method call is from a mapper proxy with a bound datasource, I’ll use that. If not, I'll check the method or class annotations for a specified datasource, then use a default if needed. For field-level proxies, I’ll create a custom proxy with BeanPostProcessor to ensure each injected mapper receives the correct datasource.

Handling datasource switching within transactions

It's important to consider that BService and CService will each have their own proxy with different datasources. The ThreadLocal stack ensures no conflicts when switching between different services. However, switching data sources mid-transaction with @Transactional can be tricky, as the transaction manager binds the connection to a single datasource. Using multiple data sources within one method may break this structure unless we use solutions like Seata or JTA for separate transactions or support multiple connections.

Handling multiple data sources with @Transactional

To manage multiple data sources in a method, we could use @Transactional(propagation = REQUIRES_NEW) for each DS, split logic into separate methods with their own annotations, or use XA/JTA for distributed transactions. Another approach is using programmatic transaction management with different transaction managers per DS. Regarding field proxies, they can handle DS switching for mapper calls, with priorities assigned to field > method > class annotations. We can implement an aspect to make decisions based on annotations and ensure proper handling with push/pop for DS management.

Managing default data source and field-level proxies

If the stack is empty, the default data source (‘elss’) can be used, but it might not always need to be pushed. For resolution, I'll push resolved or default if necessary. Also, for field proxies, I'll use a BeanPostProcessor to wrap map injection using JDK dynamic proxies, ensuring proper handling of any final fields, especially with constructor injections. I'll make sure the proxy correctly handles equals, hashCode, and other default methods.

Implementing injection processor for field proxies

The goal is to support field injection while considering non-final fields. If using constructor injection, consider ObjectProvider or proxy bean injection. For easier implementation, I'll work with @Autowired and high-priority @Order to register the processor. I'll create a BeanPostProcessor to wrap injection fields after initialization with appropriate handling for proxy creation, ensuring no double wrapping occurs. All this will be implemented with postProcessAfterInitialization.

Integrating field proxy and aspect for DS switching

The idea is to create a proxy that handles DataSourceBound methods and ensures correct data source handling. With field proxies, if a mapper method invokes the aspect, it shouldn't re-evaluate the data source. The field proxy will take charge of switching the data source, while the aspect should handle only service-related methods, avoiding conflicts with mapper calls, ensuring proper threadlocal DS management across method calls.

Handling mapper and service beans with priority

When applying data source annotations, the priority should be field > method > class. For mappers injected through fields, field proxies manage the data source, so the aspect won't apply. But for injected mappers, the aspect can handle method annotations and ensure the data source is set properly.

Ensuring correct data source handling for mapper fields

To solve the issue of handling different data sources for auto-injected mappers, we use a context container for managing data source switches. By using field-level proxies with a stack for each call, we can ensure proper isolation and restore data sources, even in nested or concurrent calls.

Proposing solution for multi-datasource and transaction handling

The current implementation struggles due to missing priority handling, and field-level proxy management. To resolve this, I propose using a stack-based context to manage data source switching. For multi-ds operations, separate transactions per datasource are necessary. I'll provide specific code modifications, ensuring correct functionality for both field and method-based annotations.

Addressing data source resolution and nesting issues

The current setup may have problems with thread-local data source management in nested calls. Service methods with multiple field annotations could lead to inconsistent data source choices. The solution adjusts the stack logic, ensuring that field-level and service-level annotations work together correctly, while preventing conflicts during nested calls. The aspect is refined to prioritize the most specific method or class within the resolved context.

Thought for 5m 52s

답변

下面给你一套可落地的改造方案,把你现有的“只适配 mapper.java 类注解”的能力,完善到:

  • ✅ 支持 字段注入的 mapper 上标注 @TargetDataSource(同一个 mapper 在不同 Service 里可标不同数据源)
  • ✅ 支持 mapper.java 方法级 @TargetDataSource
  • ✅ 支持 类级 @TargetDataSource
  • ✅ 实现优先级:字段 > 方法 > 类
  • ✅ 解决你现在实现里最大的坑:嵌套调用会把外层数据源 clear 掉(导致错库/跑默认库)

先说你当前实现的关键问题

问题 A:只看了 “DeclaringType 的类注解”

你现在是:

java
Class<?> targetInterface = ms.getDeclaringType(); TargetDataSource dataSource = targetInterface.getAnnotation(TargetDataSource.class);

这会导致:

  • 方法级注解拿不到
  • 如果是 Spring 代理(JDK/CGLIB),DeclaringType 可能是接口/代理类,类注解也可能拿不到
  • 字段注解完全没办法表达(因为 joinPoint 发生在 mapper 方法上,你已经不知道它来自哪个字段)

问题 B:clearDataSource 会破坏嵌套调用

你 finally 里直接 DynamicDataSource.clearDataSource(),典型场景:

  • AService 方法(想用 envs)
    • 调 BService 方法(切换到 elss)
      • BService 调 mapper(finally 清空)
    • 回到 AService 继续执行,此时数据源被清空 → 跑默认库 elss,直接错库。

所以必须把数据源上下文从 “单值 ThreadLocal” 改成 栈(stack):进入 push、退出 pop,才能恢复外层数据源。


总体设计(满足 0/1/2/3)

0)适配范围

  • 字段注入的 mapper:用一个 BeanPostProcessor 把带 @TargetDataSource 的 mapper 字段替换成“带数据源能力的代理”
  • mapper 方法:用 AOP 解析 method 注解
  • 类:用 AOP 解析 class 注解(你现在已有,但实现不稳)

1)优先级:字段 > 方法 > 类

  • 字段:由“字段代理”在调用 mapper 时直接 push 数据源(最高)
  • 方法:AOP 在执行方法前 push
  • 类:AOP 在执行方法前 push

2)一个 service 方法里用多个字段不同数据源

  • 调用 mapperA.xxx() 时 push dsA
  • 调用 mapperB.yyy() 时 push dsB
  • 每次 mapper 调用结束 pop,外层继续

⚠️ 但涉及事务时要注意(下面我会专门说)

3)AService 调 BService、CService,B/C 注入同一个 mapper 但数据源不同,会不会错?

  • 你现在的实现:很容易错(因为 mapper bean 是同一个,且你只按类注解决定)
  • 按下面方案:不会错
    • BService 字段会被替换成“绑定 elss 的 mapper 代理”
    • CService 字段会被替换成“绑定 envs 的 mapper 代理”
    • 即使底层真实 mapper bean 是同一个,也能做到“不同注入点不同数据源”

关键改造 1:把 ThreadLocal 改成栈(解决嵌套 clear)

DynamicDataSource 的上下文改成栈(推荐保留在同类里,最少改动):

java
package com.wiscom.basic.config.datasource; import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource; import javax.sql.DataSource; import java.util.ArrayDeque; import java.util.Deque; import java.util.Map; public class DynamicDataSource extends AbstractRoutingDataSource { private static final ThreadLocal<Deque<String>> DB_CONTEXT_HOLDER = ThreadLocal.withInitial(ArrayDeque::new); @Override protected Object determineCurrentLookupKey() { return peek(); } public static void push(String dataSource) { DB_CONTEXT_HOLDER.get().push(dataSource); } public static String peek() { Deque<String> deque = DB_CONTEXT_HOLDER.get(); return deque.isEmpty() ? null : deque.peek(); } public static void poll() { Deque<String> deque = DB_CONTEXT_HOLDER.get(); if (!deque.isEmpty()) { deque.pop(); } if (deque.isEmpty()) { DB_CONTEXT_HOLDER.remove(); } } public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) { super.setDefaultTargetDataSource(defaultTargetDataSource); super.setTargetDataSources(targetDataSources); super.afterPropertiesSet(); } }

这样外层数据源永远不会被内层 clear 掉,而是会被恢复。


关键改造 2:AOP 支持 “方法 > 类”,且不破坏外层

你现在 AOP 直接 set/clear,改成 push/poll,并且解析顺序:方法优先于类

java
package com.wiscom.basic.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.*;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.aop.support.AopUtils;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import java.lang.reflect.Method;

/**
 * Resolves @TargetDataSource with method-over-class priority and switches the
 * routing key via push/poll, so nested calls restore the outer data source.
 * NOTE(review): this aspect also advises mapper beans, so a mapper's own
 * method/class annotation is pushed ON TOP of a field-bound proxy's push —
 * verify the intended "field > method > class" ordering in integration tests.
 */
@Component
@Aspect
@Slf4j
@Order(-100) // must run before the transaction aspect
public class TargetDataSourceAspect {

    @Pointcut("execution(* com.wiscom..*.service..*.*(..)) || execution(* com.wiscom..*.dao..*.*(..)) || execution(* com.wiscom..*.mapper..*.*(..))")
    private void dsPointCut() {}

    /**
     * Push the resolved data source (if any) for the duration of the call.
     *
     * @param joinPoint intercepted invocation
     * @return the invocation's result
     * @throws Throwable propagated from the target method
     */
    @Around("dsPointCut()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        String ds = resolveDataSource(joinPoint);
        boolean pushed = false;
        if (StringUtils.hasText(ds)) {
            DynamicDataSource.push(ds);
            pushed = true;
            log.debug("切换数据源 -> {}", ds);
        } else {
            // No annotation matched: do not push — the router falls through to
            // the outer context or the default source, leaving callers intact.
            log.debug("未匹配数据源注解,当前数据源栈顶 = {}", DynamicDataSource.peek());
        }
        try {
            return joinPoint.proceed();
        } finally {
            if (pushed) {
                DynamicDataSource.poll();
                log.debug("恢复数据源,当前栈顶 = {}", DynamicDataSource.peek());
            }
        }
    }

    /**
     * Resolution order: method annotation (implementation method first, then
     * the interface method), then class annotation, then annotations on the
     * target's implemented interfaces (mapper interfaces behind JDK proxies).
     *
     * @return the data source name, or null when nothing matched
     */
    private String resolveDataSource(ProceedingJoinPoint joinPoint) {
        MethodSignature ms = (MethodSignature) joinPoint.getSignature();
        Method method = ms.getMethod();
        // FIX: guard against a null target — the original called
        // AopProxyUtils.ultimateTargetClass(joinPoint.getTarget()) directly,
        // which NPEs when no target instance is available.
        Object target = joinPoint.getTarget();
        Class<?> targetClass = target != null
                ? AopProxyUtils.ultimateTargetClass(target)
                : method.getDeclaringClass();
        Method specificMethod = AopUtils.getMostSpecificMethod(method, targetClass);
        // 1) method-level annotation (highest priority within this aspect)
        TargetDataSource mAnn = AnnotatedElementUtils.findMergedAnnotation(specificMethod, TargetDataSource.class);
        if (mAnn == null) {
            // also honour annotations declared on the interface method
            // (typical for mapper interfaces)
            mAnn = AnnotatedElementUtils.findMergedAnnotation(method, TargetDataSource.class);
        }
        if (mAnn != null && StringUtils.hasText(mAnn.name())) {
            return mAnn.name();
        }
        // 2) class-level annotation on the ultimate target class
        TargetDataSource cAnn = AnnotatedElementUtils.findMergedAnnotation(targetClass, TargetDataSource.class);
        if (cAnn != null && StringUtils.hasText(cAnn.name())) {
            return cAnn.name();
        }
        // 3) class-level annotation on implemented interfaces (JDK proxy
        //    classes do not inherit interface annotations)
        for (Class<?> itf : targetClass.getInterfaces()) {
            TargetDataSource iAnn = AnnotatedElementUtils.findMergedAnnotation(itf, TargetDataSource.class);
            if (iAnn != null && StringUtils.hasText(iAnn.name())) {
                return iAnn.name();
            }
        }
        return null;
    }
}

到这里你已经完整支持:mapper 方法注解、类注解、service 方法注解/类注解,并且嵌套安全。

但还缺 字段优先级


关键改造 3:实现“字段 > 方法 > 类”(字段注入 mapper 代理)

核心思路:对 Service 中的字段:

java
@TargetDataSource("envs") @Autowired private XxxMapper mapper;

把这个 mapper 字段值替换成一个代理:每次调用 mapper 任意方法时,先 DynamicDataSource.push("envs"),执行完 poll()

这样字段天然优先于 AOP(因为字段代理在 mapper 调用时最后生效)。

3.1 定义一个标识接口(避免重复代理、也便于调试)

java
package com.wiscom.basic.config.datasource; public interface FieldBoundDataSourceProxy { String boundDataSource(); }

3.2 代理工厂

java
package com.wiscom.basic.config.datasource; import java.lang.reflect.*; import java.util.LinkedHashSet; import java.util.Set; public class DataSourceFieldProxyFactory { @SuppressWarnings("unchecked") public static <T> T wrap(T target, String dsName) { Class<?> targetClass = target.getClass(); Set<Class<?>> ifaces = new LinkedHashSet<>(); for (Class<?> i : targetClass.getInterfaces()) { ifaces.add(i); } ifaces.add(FieldBoundDataSourceProxy.class); InvocationHandler handler = (proxy, method, args) -> { // 处理 Object 基础方法 if (method.getDeclaringClass() == Object.class) { return method.invoke(target, args); } // 暴露绑定的数据源 if (method.getDeclaringClass() == FieldBoundDataSourceProxy.class) { return dsName; } DynamicDataSource.push(dsName); try { return method.invoke(target, args); } catch (InvocationTargetException e) { throw e.getTargetException(); } finally { DynamicDataSource.poll(); } }; return (T) Proxy.newProxyInstance( targetClass.getClassLoader(), ifaces.toArray(new Class[0]), handler ); } }

3.3 BeanPostProcessor:扫描字段并替换值

java
package com.wiscom.basic.config.datasource; import org.springframework.aop.framework.AopProxyUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.core.Ordered; import org.springframework.core.annotation.AnnotatedElementUtils; import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; import java.lang.reflect.Field; @Component @Order(Ordered.LOWEST_PRECEDENCE) public class TargetDataSourceFieldProxyPostProcessor implements BeanPostProcessor { @Override public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { Class<?> clazz = AopProxyUtils.ultimateTargetClass(bean); ReflectionUtils.doWithFields(clazz, field -> processField(bean, field), this::needProcess); return bean; } private boolean needProcess(Field field) { TargetDataSource ann = AnnotatedElementUtils.findMergedAnnotation(field, TargetDataSource.class); if (ann == null || !StringUtils.hasText(ann.name())) { return false; } // 只处理接口类型(MyBatis mapper 通常为接口代理) return field.getType().isInterface(); } private void processField(Object bean, Field field) throws IllegalAccessException { TargetDataSource ann = AnnotatedElementUtils.findMergedAnnotation(field, TargetDataSource.class); if (ann == null) return; ReflectionUtils.makeAccessible(field); Object value = field.get(bean); if (value == null) return; // 已经是字段代理就跳过 if (value instanceof FieldBoundDataSourceProxy) return; Object wrapped = DataSourceFieldProxyFactory.wrap(value, ann.name()); field.set(bean, wrapped); } }

到这里,0/1/2 都满足了

✅ 字段 > 方法 > 类 如何体现?

  • 字段:字段代理每次调用 mapper 前 push(最高优先级)
  • 方法:AOP 在进入方法前 push
  • 类:AOP 在进入方法前 push
  • 栈模型使得字段 push 后一定覆盖外层;执行完恢复外层

回答你的第 3 点:会导致运行出错么?如何解决?

在你当前实现下:很容易出错

原因:

  1. BService 与 CService 注入的是同一个 mapper bean(同一个代理对象)
  2. 你现在只按 mapper 接口类注解决定数据源,根本无法区分“这个 mapper 是从哪个 Service 字段调用的”
  3. 再叠加 clearDataSource() 会清空外层上下文,嵌套调用更容易错库

用上面方案后:不会出错

  • BService 的 mapper 字段被替换成“绑定 dsB 的代理”
  • CService 的 mapper 字段被替换成“绑定 dsC 的代理”
  • AService 调 BService、CService 时,各自 mapper 调用会切到各自数据源,且栈会恢复

事务(非常关键,决定你第 2 点能不能“真正确保”)

你第 2 点说“一个方法要调用多个不同数据源完成业务”,这里要直说:

不加事务/只读场景:上面方案没问题
⚠️ 如果这个方法被 @Transactional 包住(Spring 默认单事务管理器 + AbstractRoutingDataSource):

  • 事务开始时拿到的 Connection 很可能会绑定到线程
  • 后面你再切换数据源,可能仍复用同一个 Connection → 实际还是跑在第一个数据源 或者行为不可控

解决建议(常用落地方式)

把不同数据源的操作拆成不同的事务边界,例如:

java
public void biz() { stepOnElss(); // @TargetDataSource("elss") @Transactional(REQUIRES_NEW) stepOnEnvs(); // @TargetDataSource("envs") @Transactional(REQUIRES_NEW) }

或者明确不在一个事务里做跨库(更稳)。


最小示例:字段标注多个数据源(满足你的第 2 点表达)

java
@Service public class DemoService { @TargetDataSource(name = "elss") @Autowired private UserMapper userMapperElss; @TargetDataSource(name = "envs") @Autowired private UserMapper userMapperEnvs; public void doBiz() { userMapperElss.selectA(); // 自动切 elss userMapperEnvs.selectB(); // 自动切 envs } }

如果你愿意,我也可以按你们工程的实际包结构/代理方式(JDK 代理还是 CGLIB、mapper 扫描方式、是否大量 @Transactional)把 Pointcut、字段过滤规则(只处理 mapper 包或 @Mapper 注解)进一步收紧,避免误代理普通接口字段。

질문

按照上述方法配置后报错,在调用measuredPowerList方法时候,stationInfoMapper没有使用指定的数据源,即便字段有注解,mapper的类上有注解,显示:切换数据源 -> ypm-sync,但查询时候没有查到数据报错了(这里try所以没有打印没找表的错误信息)

log
[DEBUG] [2026-01-28 18:12:57] [TargetDataSourceAspect.java-37] 未匹配数据源注解,当前数据源栈顶:null,使用默认数据源 [DEBUG] [2026-01-28 18:12:57] [BaseJdbcLogger.java-135] ==> Preparing: SELECT station_id,out_sys_id,name,abbr,district_id,region_id,partition_id,category,station_type,planning_capacity,margin,grid_status,grid_date,company_id,method_type,predict_factory,access_ugc,longitude,latitude,remark,creator,updater,create_time,update_time,delete_flag FROM power_station WHERE (station_id = ? AND delete_flag = ?) [DEBUG] [2026-01-28 18:12:57] [BaseJdbcLogger.java-135] ==> Parameters: 200000091(Long), false(Boolean) [DEBUG] [2026-01-28 18:12:57] [BaseJdbcLogger.java-135] <== Total: 1 [DEBUG] [2026-01-28 18:12:57] [TargetDataSourceAspect.java-34] 切换数据源 -> ypm-sync [DEBUG] [2026-01-28 18:12:57] [BaseJdbcLogger.java-135] ==> Preparing: SELECT id,name,abbr,type,district,capacity_plan,capacity_grid,latitude,longitude,access_time,update_time FROM station_info WHERE (abbr = ?) [DEBUG] [2026-01-28 18:12:57] [BaseJdbcLogger.java-135] ==> Parameters: ZDJSGF(String) [DEBUG] [2026-01-28 18:12:57] [TargetDataSourceAspect.java-45] 恢复数据源,当前栈顶 = null
java
package com.wiscom.sync.dao; import com.wiscom.basic.enums.DataEnum; import com.wiscom.station.mapper.PowerStationMapper; import com.wiscom.station.model.entity.PowerStation; import com.wiscom.sync.mapper.MeasuredPowerStationMapper; import com.wiscom.sync.mapper.PowerDataMapper; import com.wiscom.sync.mapper.StationInfoMapper; import com.wiscom.sync.model.entity.MeasuredPowerStation; import com.wiscom.sync.model.entity.PowerData; import com.wiscom.sync.model.entity.StationInfo; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Repository; import javax.annotation.Resource; import java.time.LocalDateTime; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @Slf4j @Repository public class PowerDataDao { @Resource private PowerStationMapper stationMapper; @Resource @TargetDataSource(name = "ypm-sync") private StationInfoMapper stationInfoMapper; @Resource private MeasuredPowerStationMapper measuredPowerStationMapper; @Resource private PowerDataMapper powerDataMapper; /** * 查询可视化文件上报实际功率(场站id映射ypm-sync同步id) * * @param stationId 场站id(弃电场站id) * @param startTime 开始时间 * @param endTime 结束时间 * @return ypm文件上报功率 */ public List<MeasuredPowerStation> measuredPowerList(Long stationId, LocalDateTime startTime, LocalDateTime endTime) { PowerStation powerStation = stationMapper.stationDetail(stationId); StationInfo stationInfo = stationInfoMapper.stationByAbbr(powerStation.getAbbr()); if (stationInfo == null) { return Collections.emptyList(); } return measuredPowerStationMapper.measuredPowerList(Long.valueOf(stationInfo.getId()), startTime, endTime) .stream().peek(o -> o.setStationId(stationId)).collect(Collectors.toList()); } }
java
package com.wiscom.sync.mapper; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.wiscom.basic.config.datasource.TargetDataSource; import com.wiscom.sync.model.entity.StationInfo; import org.apache.ibatis.annotations.Mapper; import java.util.List; @Mapper @TargetDataSource(name = "ypm-sync") public interface StationInfoMapper extends BaseMapper<StationInfo> { /** * 获取所有站点信息(可视化台账) * * @return 站点信息列表 */ default List<StationInfo> listAll() { return selectList(new LambdaQueryWrapper<>()); } /** * 根据缩写查询场站信息 * * @param abbr 缩写 * @return 场站信息 */ default StationInfo stationByAbbr(String abbr) { return selectOne(new LambdaUpdateWrapper<StationInfo>().eq(StationInfo::getAbbr, abbr)); } }

이 Q&A 공유하기