目前动态数据源注解只对mapper.java做了适配,现在需要优化完善,可以针对 自动注入的mapp...

创建于:2026年1月28日

使用 GPT-5.2 Thinking 回答 Chat01

问题

目前动态数据源注解只对mapper.java做了适配,现在需要优化完善,可以针对自动注入的mapper字段适配、mapper.java中的具体方法适配,以及目前已有的mapper接口类的适配。如果都有数据源注解,那么优先级为:字段 > 方法 > 类,下面是已有代码

自定义注解

java
package com.wiscom.basic.config.datasource;

import java.lang.annotation.*;

/**
 * Selects which entry of the dynamic routing data source a call should use.
 * <p>
 * May be placed on a field (an injected mapper), a method, or a type
 * (a mapper interface / service class); the surrounding aspect decides
 * the precedence between those placements.
 */
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface TargetDataSource {

    /**
     * The lookup key of the target data source; an empty string means
     * "not specified" and is ignored by the resolver.
     */
    String name() default "";
}

数据源切换和注解解析

java
package com.wiscom.basic.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import java.util.Objects;

/**
 * Aspect that switches the routing data source around service/dao/mapper calls.
 * <p>
 * NOTE(review): this version only consults the class-level annotation on the
 * declaring type; method-level and field-level {@code @TargetDataSource} are
 * ignored here.
 */
@Component
@Aspect
@Slf4j
@Order(-100)
public class TargetDataSourceAspect {

    /**
     * Pointcut: every method of every class under the service/dao/mapper
     * packages beneath com.wiscom.
     */
    @Pointcut("execution(* com.wiscom..*.service..*.*(..)) || execution(* com.wiscom..*.dao..*.*(..)) || execution(* com.wiscom..*.mapper..*.*(..))")
    private void dsPointCut() {
    }

    /**
     * Binds the data source for the intercepted call, then clears it.
     *
     * @param joinPoint the intercepted join point
     * @return the result of the target method
     * @throws Throwable propagated from the target method
     */
    @Around("dsPointCut()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        // Look only at the declaring type (typically the mapper interface).
        MethodSignature ms = (MethodSignature) joinPoint.getSignature();
        Class<?> targetInterface = ms.getDeclaringType();
        TargetDataSource dataSource = targetInterface.getAnnotation(TargetDataSource.class);
        /* If no annotation matched, fall back to the default data source;
         * otherwise record the annotated key in the routing context. */
        if (Objects.isNull(dataSource)) {
            // NOTE(review): unconditionally writing "elss" here means an outer
            // un-annotated call occupies the ThreadLocal before any nested,
            // annotated mapper call runs — pre-empting the nested annotation.
            DynamicDataSource.setDataSource("elss");
            log.debug("使用默认数据源");
        } else {
            DynamicDataSource.setDataSource(dataSource.name());
            log.debug("匹配到数据源:{}", dataSource.name());
        }
        try {
            return joinPoint.proceed();
        } finally {
            /* Remove the thread-local entry once the method has completed. */
            DynamicDataSource.clearDataSource();
            log.debug("当前数据源已清空");
        }
    }
}

其他方法

java
package com.wiscom.basic.config.datasource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

/**
 * Routing data source backed by a per-thread lookup key.
 * <p>
 * When the key is {@code null}, {@link AbstractRoutingDataSource} falls back
 * to the configured default target data source.
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    // Per-thread key used by determineCurrentLookupKey().
    private static final ThreadLocal<String> DB_CONTEXT_HOLDER = new ThreadLocal<>();

    /**
     * Returns the lookup key of the data source currently bound to this thread.
     *
     * @return the current lookup key, or null for the default data source
     */
    @Override
    protected Object determineCurrentLookupKey() {
        return getDataSource();
    }

    /**
     * Binds a data source key to the current thread.
     *
     * @param dataSource the lookup key to bind
     */
    public static void setDataSource(String dataSource) {
        DB_CONTEXT_HOLDER.set(dataSource);
    }

    /**
     * Reads the data source key bound to the current thread.
     *
     * @return the bound key, or null if none
     */
    public static String getDataSource() {
        return DB_CONTEXT_HOLDER.get();
    }

    /**
     * Clears the thread-local binding (prevents leakage across pooled threads).
     */
    public static void clearDataSource() {
        DB_CONTEXT_HOLDER.remove();
    }

    /**
     * Configures the default data source and the map of switchable targets.
     *
     * @param defaultTargetDataSource the default data source
     * @param targetDataSources       map of lookup key to switchable data source
     */
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }
}
java
package com.wiscom.basic.config.datasource;

import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * Declares every physical Druid data source and assembles them into the
 * primary {@link DynamicDataSource}. All sources except "elss" are optional
 * and created only when their {@code spring.datasource.*.url} is configured.
 * <p>
 * NOTE(review): {@code @Configuration} already makes this a bean; the extra
 * {@code @Component} is redundant.
 */
@Configuration
@Component
public class DataSourceConfig {

    /**
     * Curtailment-system (elss) PostgreSQL data source — the default.
     *
     * @return the data source
     */
    @Bean(name = "elss")
    @ConfigurationProperties(prefix = "spring.datasource.elss")
    public DataSource pgsqlSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Centrally-dispatched curtailment data source (optional).
     * <p>
     * NOTE(review): bean name uses an underscore ("elss_centrally") while the
     * routing key registered below uses a hyphen ("elss-centrally") — annotation
     * users must write the hyphenated form; consider unifying the two.
     *
     * @return the data source
     */
    @Bean(name = "elss_centrally")
    @ConfigurationProperties(prefix = "spring.datasource.elss-centrally")
    @ConditionalOnProperty(prefix = "spring.datasource.elss-centrally", name = "url")
    public DataSource pgsqlElssCentrallySource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Visualization-system (envs) PostgreSQL data source (optional).
     *
     * @return the data source
     */
    @Bean(name = "envs")
    @ConfigurationProperties(prefix = "spring.datasource.envs")
    @ConditionalOnProperty(prefix = "spring.datasource.envs", name = "url")
    public DataSource pgsqlEnvsSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * "One-plane" sync PostgreSQL data source (optional).
     *
     * @return the data source
     */
    @Bean(name = "ypm-sync")
    @ConfigurationProperties(prefix = "spring.datasource.ypm-sync")
    @ConditionalOnProperty(prefix = "spring.datasource.ypm-sync", name = "url")
    public DataSource pgsqlYpmSyncSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Automation next-generation DM7 data source (optional).
     *
     * @return the data source
     */
    @Bean(name = "auto-dm")
    @ConfigurationProperties(prefix = "spring.datasource.auto-dm")
    @ConditionalOnProperty(prefix = "spring.datasource.auto-dm", name = "url")
    public DataSource autoDmSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Automation spot-market DM8 data source (optional).
     *
     * @return the data source
     */
    @Bean(name = "ddc-dm")
    @ConfigurationProperties(prefix = "spring.datasource.ddc-dm")
    @ConditionalOnProperty(prefix = "spring.datasource.ddc-dm", name = "url")
    public DataSource ddcDmSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Builds the primary routing data source. "elss" is both a routable entry
     * and the default target; every other source is added only if present.
     *
     * @param elss          the elss data source (required, default)
     * @param envs          the envs data source (optional)
     * @param ypmSync       the one-plane sync source (optional)
     * @param autoDm        the automation DM source (optional)
     * @param ddcDm         the spot-market DM source (optional)
     * @param elssCentrally the centrally-dispatched source (optional)
     * @return the dynamic routing data source
     */
    @Bean
    @Primary
    public DataSource dataSource(@Qualifier("elss") DataSource elss,
                                 @Autowired(required = false) @Qualifier("envs") DataSource envs,
                                 @Autowired(required = false) @Qualifier("ypm-sync") DataSource ypmSync,
                                 @Autowired(required = false) @Qualifier("auto-dm") DataSource autoDm,
                                 @Autowired(required = false) @Qualifier("ddc-dm") DataSource ddcDm,
                                 @Autowired(required = false) @Qualifier("elss_centrally") DataSource elssCentrally) {
        Map<Object, Object> dsMap = new HashMap<>(16);
        dsMap.put("elss", elss);
        if (envs != null) {
            dsMap.put("envs", envs);
        }
        if (ypmSync != null) {
            dsMap.put("ypm-sync", ypmSync);
        }
        if (autoDm != null) {
            dsMap.put("auto-dm", autoDm);
        }
        if (ddcDm != null) {
            // Routing key matches the bean name here ("ddc-dm").
            dsMap.put("ddc-dm", ddcDm);
        }
        if (elssCentrally != null) {
            // Hyphenated key, unlike the underscore bean name — see NOTE above.
            dsMap.put("elss-centrally", elssCentrally);
        }
        return new DynamicDataSource(elss, dsMap);
    }
}

思考

Considering aspect-based data source switching

It seems like using AOP on the mapper methods could be effective, as the target would be a proxy. If the caller’s field is annotated with @TargetDataSource, we could inspect the context of the class or field, but using JoinPoint.getThis() or getTarget() might not give enough info on the caller. Alternatively, triggering on the service method itself and picking the data source from a field annotation might make more sense.

Determining which mapper field to use

It seems like the mapping logic should depend on whether the service or method has the @TargetDataSource annotation. Still, if there are several mappers, it would be tricky to choose the correct one for each call. Maybe I could look at the class or method level, or check if a field in the service/DAO is annotated with @TargetDataSource.

Considering field-level data source adaptation

If I apply an aspect around service methods, I can check for @TargetDataSource annotations on auto-injected mapper fields in the service class. If there’s only one annotated field, I can pick that data source, but if there are multiple, it could get ambiguous. Alternatively, using BeanPostProcessor to wrap the mappers could also solve this issue.

Optimizing dynamic data source annotation handling

Spring’s injection metadata can be leveraged, but creating a proxy for each injection point might be too heavy. Instead, supporting field-level annotations on mapper interfaces makes sense — though the interface only allows constants. They want dynamic data sources to extend to fields in service classes, prioritizing field > method > class annotations. To handle this, the service must identify which injection point was used. This might be tricky, as it's not straightforward in the default setup.

Solving conflicting data source selection logic

It seems like they want the field annotation in the service to take priority over everything else, meaning if a service method has any injected field with @TargetDataSource, it should dictate the data source. However, this creates a bit of ambiguity if multiple fields are annotated. There's also the complication of reconciling method-level annotations in case multiple annotations are used. I could create a utility, TargetDataSourceResolver, that checks in the proper order, starting with the data source set earlier and then moving through the field, method, and class annotations as needed.

Clarifying data source resolution priority

They want to prioritize field annotations over method and class ones. This means, on a service joinpoint, if a field is annotated with @TargetDataSource, it should take precedence, even if the method has its own annotation. To avoid conflicts, I'll check if a data source is already set in ThreadLocal before applying new DS, maintaining a stack for nested joinpoints. I'll focus on identifying @TargetDataSource annotations on fields in service/DAO classes, choosing the right field based on the executing method.

Handling multiple annotated mapper fields in service

In cases where multiple annotated mapper fields exist in a service, we should set a rule. If these fields have different data sources, we log a warning and pick the first or throw an error. If only one distinct data source is found, use it; otherwise, fallback to method/class or default. The key rule: field-level applies only when unambiguous, with appropriate checks for overriding and defaults.

Refining data source selection process

We’ll avoid setting the default data source when no annotation is present, allowing inner joinpoints to do so. The outer service method won’t set a DS without an annotation, but annotated mapper methods will. For nested overrides, if there's already a DS set, it won't be overridden, ensuring the field takes priority.

Resolving data source annotations in the call stack

The priority order for annotations is field > method > class, but the call stack approach ensures the first match wins (closest to the entry point). We'll check field annotations first, then method, and then class. If no match is found, inner joinpoints won’t override. The ambiguity between service class and mapper method annotations is acknowledged, and a pointcut will ensure proper interception.

Implementing robust data source resolution

For class-level annotations, we'll check both the mapper interface and the target class for @TargetDataSource. For field-level, we’ll scan the fields using AopProxyUtils to handle service proxies. If a field annotation has an empty name, we’ll ignore it. We’ll modify the DynamicDataSource to use a ThreadLocal stack for supporting nested calls while preserving priority.

Implementing dynamic data source resolution with priority

We’ll ensure outer and inner joinpoints adhere to priority, where field > method > class. To manage context, we’ll create a separate DynamicDataSourceContextHolder with a ThreadLocal stack that holds both the data source key and its priority. We'll modify the DynamicDataSource class and aspect logic to handle this priority-based resolution and context management.

Integrating dynamic data source management into DynamicDataSource

We’re modifying the DynamicDataSourceContextHolder to include priority management. The implementation ensures inner joinpoints smoothly override outer joinpoints, and method priority is applied accordingly. The DynamicDataSource now delegates to this context holder for better clarity. We’ll also maintain backward compatibility by keeping setDataSource as a wrapper around the new priority logic without breaks. Adjustments for proxies are needed, and field scans will be cached for better performance.

Addressing dynamic data source priority management

We're updating DynamicDataSourceContextHolder to handle priority for method, field, and class annotations. We’ll ensure that inner calls can override outer calls by comparing priorities. For nested calls, the more specific annotation (closer context) will win. We’ll also update the TargetDataSourceAspect to handle the logic of dynamically selecting and switching data sources based on prior annotations. Priority handling will be flexible to allow overrides while maintaining logical consistency.

Handling dynamic data source selection with field resolution

Currently, if an outer method uses one data source and an inner method uses another, the inner will override it with field-level annotation priority, which works well. We’ll implement cache-based field and method resolution, ensure correct push/pop behavior with annotations, and improve the aspect for correct context management. The idea is to track data sources effectively to prevent conflicts while ensuring priority adherence across fields, methods, and classes.

Thought for 6m 27s

回答

下面给你一套可落地的改造方案:同时支持

  1. 自动注入的 mapper 字段(在 @Autowired private XxxMapper xxxMapper; 这种字段上加注解)
  2. mapper.java 具体方法(方法上加注解)
  3. mapper.java 类/接口(接口上加注解,已有能力)

并且满足你要的优先级:字段 > 方法 > 类
另外我会顺手把一个“隐形坑”修掉:你现在 AOP 在没注解时强行 setDataSource("elss"),会导致外层先把 elss 写进 ThreadLocal,从而内层 mapper 方法/类注解永远没有机会生效。正确做法是:没匹配到注解时不设置,让 AbstractRoutingDataSource 走默认数据源即可(你已经在 DynamicDataSource(elss, dsMap) 配了默认)。


一、增加一个“带优先级 + 支持嵌套”的上下文 Holder

目的:

  • 支持嵌套调用时 push/pop 恢复上一个数据源
  • 让“字段 > 方法 > 类”的优先级在嵌套场景也成立
  • 只有更高/同级优先级才允许覆盖(同级内层覆盖外层更符合直觉)

新增文件:DynamicDataSourceContextHolder.java

java
package com.wiscom.basic.config.datasource;

import org.springframework.util.StringUtils;

import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Thread-local, stack-based holder for the current routing data source key.
 * <p>
 * Each frame carries the key plus the {@link Priority} of the annotation that
 * selected it, so nested join points can decide whether they may override the
 * outer selection (field > method > class; manual wins over all).
 */
public final class DynamicDataSourceContextHolder {

    // Utility holder: no instances.
    private DynamicDataSourceContextHolder() {
    }

    /** Selection priority; a LOWER level() value is the STRONGER priority. */
    public enum Priority {
        MANUAL(0), FIELD(1), METHOD(2), CLASS(3);
        private final int level;
        Priority(int level) {
            this.level = level;
        }
        public int level() {
            return level;
        }
    }

    /** One stack frame: a data source key and the priority that chose it. */
    private static final class Context {
        private final String key;
        private final Priority priority;
        private Context(String key, Priority priority) {
            this.key = key;
            this.priority = priority;
        }
    }

    // Per-thread stack of (key, priority) frames; initialized lazily.
    private static final ThreadLocal<Deque<Context>> HOLDER = ThreadLocal.withInitial(ArrayDeque::new);

    /** @return the current key, or null when nothing is bound */
    public static String peek() {
        Deque<Context> deque = HOLDER.get();
        return deque.isEmpty() ? null : deque.peek().key;
    }

    /** @return the priority of the current frame, or null when empty */
    public static Priority peekPriority() {
        Deque<Context> deque = HOLDER.get();
        return deque.isEmpty() ? null : deque.peek().priority;
    }

    /**
     * Pushes a selection if it is allowed to override the current frame.
     *
     * @return true if a frame was actually pushed (caller must pop in finally)
     */
    public static boolean push(String key, Priority priority) {
        if (!StringUtils.hasText(key) || priority == null) {
            return false;
        }
        Deque<Context> deque = HOLDER.get();
        Context current = deque.peek();
        // Same-level override allowed: an inner frame beats an outer one
        // when their priorities are equal.
        if (current == null || priority.level() <= current.priority.level()) {
            deque.push(new Context(key, priority));
            return true;
        }
        return false;
    }

    /**
     * Pops the frame pushed by the matching {@link #push}; no-op when the
     * push was rejected. Removes the ThreadLocal once the stack is empty.
     */
    public static void pop(boolean pushed) {
        if (!pushed) {
            return;
        }
        Deque<Context> deque = HOLDER.get();
        if (!deque.isEmpty()) {
            deque.pop();
        }
        if (deque.isEmpty()) {
            HOLDER.remove();
        }
    }

    /** Drops the whole stack for the current thread. */
    public static void clear() {
        HOLDER.remove();
    }
}

二、让 DynamicDataSource 从 Holder 里取当前 key(并保留你原有 API)

修改 DynamicDataSource

java
package com.wiscom.basic.config.datasource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

/**
 * Routing data source that delegates key resolution to
 * {@link DynamicDataSourceContextHolder}. A null key makes
 * {@link AbstractRoutingDataSource} use the default target data source.
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    @Override
    protected Object determineCurrentLookupKey() {
        return DynamicDataSourceContextHolder.peek();
    }

    /**
     * Legacy-compatible manual switch; treated as the highest priority.
     * <p>
     * NOTE(review): the boolean result of push() is discarded here, and
     * {@link #clearDataSource()} clears the WHOLE stack rather than popping
     * one frame — mixing this legacy pair with aspect-managed push/pop on the
     * same thread can drop outer frames; verify callers before relying on it.
     */
    public static void setDataSource(String dataSource) {
        DynamicDataSourceContextHolder.push(dataSource, DynamicDataSourceContextHolder.Priority.MANUAL);
    }

    /** @return the currently bound key, or null */
    public static String getDataSource() {
        return DynamicDataSourceContextHolder.peek();
    }

    /** Clears the entire per-thread context stack. */
    public static void clearDataSource() {
        DynamicDataSourceContextHolder.clear();
    }

    /**
     * Configures the default data source and the map of switchable targets.
     *
     * @param defaultTargetDataSource the default data source
     * @param targetDataSources       map of lookup key to switchable data source
     */
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }
}

关键点:没设置 key 时返回 null,此时 AbstractRoutingDataSource 会自动用默认数据源(你默认就是 elss)。


三、改造 AOP:字段 > 方法 > 类,并且加缓存避免反射频繁扫描

你原来的 TargetDataSourceAspect 只看了接口类注解,而且没注解就强制设 elss。下面改造后:

  • 字段:扫描当前被调用 Bean 的字段(通常是 service/dao),找 @TargetDataSource(name="...")
    • 如果同一个类里有多个字段注解且数据源不同:认为歧义,不使用字段级(打印 warn),你就用方法/类来明确
    • 如果多个字段但数据源相同:OK
  • 方法:优先取 “最具体方法”(兼容代理/接口)
  • 类:先看 ms.getDeclaringType()(对 mapper 接口很关键),再看 targetClass

修改后的 TargetDataSourceAspect

java
package com.wiscom.basic.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.aop.support.AopUtils;
import org.springframework.core.BridgeMethodResolver;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.HashSet;

/**
 * Aspect resolving @TargetDataSource with precedence field > method > class,
 * pushing the selection onto the per-thread context stack so nested calls
 * restore the outer selection on exit.
 */
@Component
@Aspect
@Slf4j
@Order(-100)
public class TargetDataSourceAspect {

    /** Pointcut: all methods under the com.wiscom service/dao/mapper packages. */
    @Pointcut("execution(* com.wiscom..*.service..*.*(..)) || execution(* com.wiscom..*.dao..*.*(..)) || execution(* com.wiscom..*.mapper..*.*(..))")
    private void dsPointCut() {
    }

    /** Immutable pair: the chosen key and the priority that chose it. */
    private static final class DsSelection {
        private final String key;
        private final DynamicDataSourceContextHolder.Priority priority;
        private DsSelection(String key, DynamicDataSourceContextHolder.Priority priority) {
            this.key = key;
            this.priority = priority;
        }
    }

    // Cache: class -> field-level resolution ("" means none or ambiguous).
    private final ConcurrentMap<Class<?>, String> fieldDsCache = new ConcurrentHashMap<>();
    // Cache: method -> method-level resolution ("" means none).
    private final ConcurrentMap<Method, String> methodDsCache = new ConcurrentHashMap<>();
    // Cache: class -> class-level resolution ("" means none).
    private final ConcurrentMap<Class<?>, String> classDsCache = new ConcurrentHashMap<>();

    /**
     * Resolves and (when permitted by priority) pushes the data source for
     * the intercepted call; always pops what it pushed on exit.
     *
     * @param joinPoint the intercepted join point
     * @return the result of the target method
     * @throws Throwable propagated from the target method
     */
    @Around("dsPointCut()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        DsSelection selection = resolve(joinPoint);
        boolean pushed = false;
        if (selection != null) {
            // push() returns false when an outer, stronger-priority frame wins.
            pushed = DynamicDataSourceContextHolder.push(selection.key, selection.priority);
            if (pushed) {
                log.debug("切换数据源:{}(来源:{})", selection.key, selection.priority);
            } else {
                log.debug("保持当前数据源:{}(当前优先级:{},尝试来源:{})", DynamicDataSourceContextHolder.peek(), DynamicDataSourceContextHolder.peekPriority(), selection.priority);
            }
        } else {
            // No annotation anywhere: do not touch the context so the routing
            // data source falls back to its configured default target.
            log.debug("未匹配到数据源注解,使用默认数据源(不写入上下文)");
        }
        try {
            return joinPoint.proceed();
        } finally {
            DynamicDataSourceContextHolder.pop(pushed);
            if (pushed) {
                log.debug("数据源上下文已恢复,当前:{}", DynamicDataSourceContextHolder.peek());
            }
        }
    }

    /**
     * Applies the precedence chain field > method > class.
     *
     * @return the selection, or null when no annotation with a key was found
     */
    private DsSelection resolve(ProceedingJoinPoint joinPoint) {
        MethodSignature ms = (MethodSignature) joinPoint.getSignature();
        Object target = joinPoint.getTarget();
        Class<?> targetClass = (target == null) ? ms.getDeclaringType() : AopProxyUtils.ultimateTargetClass(target);
        // Take the most specific method (handles interfaces/proxies/bridges).
        Method interfaceMethod = ms.getMethod();
        Method specificMethod = AopUtils.getMostSpecificMethod(interfaceMethod, targetClass);
        specificMethod = BridgeMethodResolver.findBridgedMethod(specificMethod);
        // 1) Field annotations on the target class (highest priority).
        String fieldKey = resolveFromFields(targetClass);
        if (StringUtils.hasText(fieldKey)) {
            return new DsSelection(fieldKey, DynamicDataSourceContextHolder.Priority.FIELD);
        }
        // 2) Method annotation (METHOD priority).
        String methodKey = resolveFromMethod(specificMethod);
        if (!StringUtils.hasText(methodKey) && interfaceMethod != specificMethod) {
            // Safety net: the interface declaration may carry the annotation.
            methodKey = resolveFromMethod(interfaceMethod);
        }
        if (StringUtils.hasText(methodKey)) {
            return new DsSelection(methodKey, DynamicDataSourceContextHolder.Priority.METHOD);
        }
        // 3) Class level: declaring type first (matters for mapper interfaces),
        //    then the ultimate target class.
        Class<?> declaringType = ms.getDeclaringType();
        String declaringKey = resolveFromClass(declaringType);
        if (StringUtils.hasText(declaringKey)) {
            return new DsSelection(declaringKey, DynamicDataSourceContextHolder.Priority.CLASS);
        }
        String targetKey = resolveFromClass(targetClass);
        if (StringUtils.hasText(targetKey)) {
            return new DsSelection(targetKey, DynamicDataSourceContextHolder.Priority.CLASS);
        }
        return null;
    }

    /**
     * Field-level resolution, cached per class. Distinct keys across multiple
     * annotated fields are treated as ambiguous and disable field-level routing.
     */
    private String resolveFromFields(Class<?> targetClass) {
        return fieldDsCache.computeIfAbsent(targetClass, cls -> {
            Set<String> names = new HashSet<>();
            // doWithFields also walks superclasses.
            ReflectionUtils.doWithFields(cls, field -> {
                if (Modifier.isStatic(field.getModifiers())) {
                    return;
                }
                TargetDataSource ann = field.getAnnotation(TargetDataSource.class);
                if (ann != null && StringUtils.hasText(ann.name())) {
                    names.add(ann.name());
                }
            });
            if (names.isEmpty()) {
                return "";
            }
            if (names.size() == 1) {
                return names.iterator().next();
            }
            // Ambiguous: multiple fields with DIFFERENT keys — cannot tell
            // which mapper the current method will use, so fall through.
            // (Warn fires once per class because the result is cached.)
            log.warn("类 {} 存在多个字段 @TargetDataSource 且数据源不同:{},字段级策略不生效,请用方法/类注解明确。", cls.getName(), names);
            return "";
        });
    }

    /** Method-level resolution, cached per Method; "" means not annotated. */
    private String resolveFromMethod(Method method) {
        return methodDsCache.computeIfAbsent(method, m -> {
            TargetDataSource ann = AnnotationUtils.findAnnotation(m, TargetDataSource.class);
            if (ann == null || !StringUtils.hasText(ann.name())) {
                return "";
            }
            return ann.name();
        });
    }

    /** Class-level resolution, cached per class; "" means not annotated. */
    private String resolveFromClass(Class<?> clazz) {
        return classDsCache.computeIfAbsent(clazz, c -> {
            TargetDataSource ann = AnnotationUtils.findAnnotation(c, TargetDataSource.class);
            if (ann == null || !StringUtils.hasText(ann.name())) {
                return "";
            }
            return ann.name();
        });
    }
}

四、怎么用(对应你三个适配点)

1)自动注入 mapper 字段适配(字段优先级最高)

java
// Example: field-level annotation (highest priority) on an injected mapper.
@Service
public class DemoService {

    // The field annotation routes calls made through this service to "envs".
    @TargetDataSource(name = "envs")
    @Autowired
    private UserMapper userMapper;

    public void test() {
        userMapper.selectById(1L);
    }
}

说明:

  • 只要该 Service 类里字段注解不歧义(只有一个数据源,或多个字段但同名数据源),进入任意 service 方法就会切到 envs
  • 如果一个类里确实有多个 mapper 字段不同库:会 warn 并不走字段级,你就用 方法注解mapper 方法注解 精确指定

2)mapper.java 具体方法适配(方法优先级第二)

java
// Example: method-level annotation (second priority) on a mapper method.
@Mapper
public interface UserMapper {

    // Only this method is routed to "envs".
    @TargetDataSource(name = "envs")
    User selectById(Long id);

    // Un-annotated: falls through to class-level / default resolution.
    User selectDefault(Long id);
}

3)mapper.java 类/接口适配(类优先级第三,你已有)

java
// Example: class-level annotation (third priority) on a mapper interface.
// NOTE: "..." is a placeholder from the chat answer, not compilable Java.
@TargetDataSource(name = "auto-dm")
@Mapper
public interface AutoMapper {
    int insertXxx(...);
}

五、你现在配置里一个容易踩的点(顺带提醒)

DataSourceConfig 里:

  • Bean 名:@Bean(name = "elss_centrally")
  • dsMap key:dsMap.put("elss-centrally", elssCentrally);

那你注解里必须写 elss-centrally 才能匹配(而不是 elss_centrally)。
建议统一成一个名字(要么都用下划线,要么都用中划线),避免线上排查很痛苦。


如果你希望“字段级”能精确到当前调用的是哪个 mapper 字段(同一 service 内多个 mapper 不同库也能自动判断),那就不是纯 AOP 能稳定做到的了,需要引入注入点代理或更深的调用链识别方案;但就你现在的诉求(字段/方法/类优先级)和现有工程复杂度,上面这套通常已经够用且稳定。

问题

在service中对多个字段进行标注@TargetDataSource,其中的方法还使用了这几个数据源,那么表示这个方法需要调用不同数据源来完成业务操作。另外如果AService的A方法分别调用BService的B方法和CService的C方法,其中B和C用自动注入同一个mapper的方法,但不是同一个数据源@TargetDataSource,会导致运行出错么?

分享这个问答