目前动态数据源注解只对mapper.java做了适配,现在需要优化完善,可以针对 自动注入的mapp...

Erstellt am: 28. Januar 2026

Beantwortet mit GPT-5.2 Thinking von Chat01

Frage

目前动态数据源注解只对mapper.java做了适配,现在需要优化完善,可以针对 自动注入的mapper字段适配,mapper.java中的具体方法适配,以及目前已有的mapper类的适配。如果都有数据源注解,那么优先级 字段 > 方法 > 类,下面是已有代码

自定义注解

java
package com.wiscom.basic.config.datasource; import java.lang.annotation.*; @Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface TargetDataSource { String name() default ""; }

数据源切换和注解解析

java
package com.wiscom.basic.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import java.util.Objects;

/**
 * Aspect that sets the dynamic-data-source routing key before each intercepted
 * call and clears it afterwards.
 *
 * Only the CLASS-level {@code @TargetDataSource} annotation of the declaring
 * type is consulted in this version; method- and field-level annotations are
 * ignored here.
 *
 * NOTE(review): the single-value ThreadLocal plus an unconditional clear in
 * {@code finally} means a nested call (e.g. service -> mapper, both matched by
 * the pointcut) clears the OUTER data source when the inner call returns —
 * confirm before relying on nested invocations.
 */
@Component
@Aspect
@Slf4j
@Order(-100)
public class TargetDataSourceAspect {

    /**
     * Pointcut: every method in service/dao/mapper packages under com.wiscom.
     */
    @Pointcut("execution(* com.wiscom..*.service..*.*(..)) || execution(* com.wiscom..*.dao..*.*(..)) || execution(* com.wiscom..*.mapper..*.*(..))")
    private void dsPointCut() {
    }

    /**
     * Selects the data source for the intercepted call.
     *
     * @param joinPoint the intercepted invocation
     * @return whatever the intercepted method returns
     * @throws Throwable propagated unchanged from the intercepted method
     */
    @Around("dsPointCut()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        // Look up the class-level annotation on the declaring interface/class.
        MethodSignature ms = (MethodSignature) joinPoint.getSignature();
        Class<?> targetInterface = ms.getDeclaringType();
        TargetDataSource dataSource = targetInterface.getAnnotation(TargetDataSource.class);
        /* No annotation: fall back to the default data source; otherwise bind
           the annotated key to the dynamic-data-source context. */
        if (Objects.isNull(dataSource)) {
            DynamicDataSource.setDataSource("elss");
            log.debug("使用默认数据源");
        } else {
            DynamicDataSource.setDataSource(dataSource.name());
            log.debug("匹配到数据源:{}", dataSource.name());
        }
        try {
            return joinPoint.proceed();
        } finally {
            /* Always clear the ThreadLocal so the key cannot leak to the next
               task running on a pooled thread. */
            DynamicDataSource.clearDataSource();
            log.debug("当前数据源已清空");
        }
    }
}

其他方法

java
package com.wiscom.basic.config.datasource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

/**
 * Routing {@link DataSource} whose lookup key is kept in a per-thread slot.
 *
 * Callers bind a key via {@link #setDataSource(String)} before executing SQL
 * and release it via {@link #clearDataSource()} afterwards.
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    /** Per-thread holder for the currently selected data source key. */
    private static final ThreadLocal<String> CURRENT_KEY = new ThreadLocal<>();

    /**
     * Wires the default data source plus the switchable targets and triggers
     * Spring's resolution of the target map.
     *
     * @param defaultTargetDataSource data source used when no key is bound
     * @param targetDataSources       map of key -> switchable data source
     */
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }

    /**
     * Binds the given key to the current thread.
     *
     * @param dataSource data source key
     */
    public static void setDataSource(String dataSource) {
        CURRENT_KEY.set(dataSource);
    }

    /**
     * @return the key bound to the current thread, or {@code null} when none
     */
    public static String getDataSource() {
        return CURRENT_KEY.get();
    }

    /** Removes the key bound to the current thread. */
    public static void clearDataSource() {
        CURRENT_KEY.remove();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        return getDataSource();
    }
}
java
package com.wiscom.basic.config.datasource;

import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * Declares the individual Druid data sources and assembles the primary routing
 * {@link DynamicDataSource}. Every source except "elss" is optional: it is only
 * created when its {@code spring.datasource.*.url} property is present.
 *
 * NOTE(review): {@code @Configuration} already registers this class as a bean,
 * so the additional {@code @Component} looks redundant — confirm it is intentional.
 */
@Configuration
@Component
public class DataSourceConfig {

    /**
     * Curtailed-electricity (ELSS) system PostgreSQL data source — the default.
     *
     * @return data source object
     */
    @Bean(name = "elss")
    @ConfigurationProperties(prefix = "spring.datasource.elss")
    public DataSource pgsqlSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Centrally-dispatched curtailment data source.
     *
     * NOTE(review): the bean is named "elss_centrally" (underscore) while the
     * routing-map key below is "elss-centrally" (hyphen) — callers of
     * {@code @TargetDataSource} must use the hyphenated key; confirm this
     * mismatch is intentional.
     *
     * @return data source object
     */
    @Bean(name = "elss_centrally")
    @ConfigurationProperties(prefix = "spring.datasource.elss-centrally")
    @ConditionalOnProperty(prefix = "spring.datasource.elss-centrally", name = "url")
    public DataSource pgsqlElssCentrallySource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Visualization system PostgreSQL data source.
     *
     * @return data source object
     */
    @Bean(name = "envs")
    @ConfigurationProperties(prefix = "spring.datasource.envs")
    @ConditionalOnProperty(prefix = "spring.datasource.envs", name = "url")
    public DataSource pgsqlEnvsSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * "One-plane" synchronization PostgreSQL data source.
     *
     * @return data source object
     */
    @Bean(name = "ypm-sync")
    @ConfigurationProperties(prefix = "spring.datasource.ypm-sync")
    @ConditionalOnProperty(prefix = "spring.datasource.ypm-sync", name = "url")
    public DataSource pgsqlYpmSyncSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Automation next-generation Dameng 7 data source.
     *
     * @return data source object
     */
    @Bean(name = "auto-dm")
    @ConfigurationProperties(prefix = "spring.datasource.auto-dm")
    @ConditionalOnProperty(prefix = "spring.datasource.auto-dm", name = "url")
    public DataSource autoDmSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Automation spot-market Dameng 8 data source.
     *
     * @return data source object
     */
    @Bean(name = "ddc-dm")
    @ConfigurationProperties(prefix = "spring.datasource.ddc-dm")
    @ConditionalOnProperty(prefix = "spring.datasource.ddc-dm", name = "url")
    public DataSource ddcDmSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Assembles the primary routing data source. Optional sources that were not
     * created (missing url property) arrive as {@code null} and are skipped.
     *
     * @param elss          ELSS data source (mandatory, also the default target)
     * @param envs          visualization data source (optional)
     * @param ypmSync       one-plane sync data source (optional)
     * @param autoDm        automation Dameng data source (optional)
     * @param ddcDm         spot-market Dameng data source (optional)
     * @param elssCentrally centrally-dispatched curtailment data source (optional)
     * @return the dynamic routing data source
     */
    @Bean
    @Primary
    public DataSource dataSource(@Qualifier("elss") DataSource elss,
                                 @Autowired(required = false) @Qualifier("envs") DataSource envs,
                                 @Autowired(required = false) @Qualifier("ypm-sync") DataSource ypmSync,
                                 @Autowired(required = false) @Qualifier("auto-dm") DataSource autoDm,
                                 @Autowired(required = false) @Qualifier("ddc-dm") DataSource ddcDm,
                                 @Autowired(required = false) @Qualifier("elss_centrally") DataSource elssCentrally) {
        Map<Object, Object> dsMap = new HashMap<>(16);
        dsMap.put("elss", elss);
        if (envs != null) {
            dsMap.put("envs", envs);
        }
        if (ypmSync != null) {
            dsMap.put("ypm-sync", ypmSync);
        }
        if (autoDm != null) {
            dsMap.put("auto-dm", autoDm);
        }
        if (ddcDm != null) {
            dsMap.put("ddc-dm", ddcDm);
        }
        if (elssCentrally != null) {
            // Registered under the hyphenated key, not the underscored bean name.
            dsMap.put("elss-centrally", elssCentrally);
        }
        return new DynamicDataSource(elss, dsMap);
    }
}

Gedanke

Considering the annotation's effect on data source choice

I think they might want service methods annotated to use the correct data source. Specifically, if a service has a @TargetDataSource annotation on a field, the method should use that data source, maybe even defaulting for the service. I wonder if AOP can help by intercepting method calls to map the field correctly.

Exploring data source handling with mapper proxies

I’m thinking about how to manage the data source when injecting mapper beans. If the data source depends on the field’s injection point, it’s tricky since the same bean can be injected into different services. Maybe wrapping the mapper bean with a proxy could work, but I’d need to account for context-dependent data sources.

Designing field-level data source handling

To handle field-level annotations, I could interpret it as setting the data source (DS) based on the first annotated mapper field in the service class when the service method runs. But what if there are multiple fields? Maybe a more robust approach would be to detect the @TargetDataSource annotation on the field or on specific methods.

Refining approach for data source handling

It seems it's not possible to get the data source directly from the method call. One alternative is to focus on intercepting only the mapper methods and choosing the data source based on annotations at the field level. If the service method isn't annotated, then we could infer the DS from the mapper field's annotation.

Designing data source selection for service/DAO methods

For a service method, if there's a field annotation in the service class, it should take priority over class and method annotations. To determine the data source (DS) at runtime, I'd look for the @TargetDataSource annotation on relevant fields, fall back to method-level or class-level annotations if needed, and deal with possible ambiguities safely.

Designing a data source resolution mechanism

To resolve the data source (DS) for a given method, I’ll create a utility to scan the target class’s fields, methods, and class annotations in order of priority (field > method > class). If multiple fields are annotated differently, I'll fallback to the method or class annotations, and avoid overriding the DS context unless necessary. This helps ensure accurate and context-specific DS determination across service and mapper layers.

Handling data source prioritization and nested calls

The priority for selecting the data source (DS) should be field > method > class, focusing on injected fields in the service, not the mapper method. In the case of nested calls, the service sets DS first, but mapper-level annotations may override it. To avoid conflicts, a stack-based context might be needed to save and restore DS.

Implementing dynamic data source context stack

To manage multiple data sources, we can use a stack with ThreadLocal<Deque<String>>. Methods push and pop the current data source and its priority. Context is resolved based on annotation priority (field > method > class). The service layer will set the DS first and avoid overrides within a method if the priority is lower.

Implementing dynamic data source context stack in nested calls

We’ll need to push the current context when nesting, but it’s fine to only pop if something was pushed. The service method might set DS via the field, but the mapper method shouldn’t override it if priorities don’t match. We'll also ensure annotations respect the field or class type, and only handle specific mappers.

Proposing final solution for Dynamic DataSource management

I’m suggesting we create a context holder to support nesting and priority levels. Modify DynamicDataSource to leverage this holder, and update the aspect to resolve DS based on field, method, class, and default priority. Additionally, we'll support automatic injection for mapper fields and provide examples with minimal explanations.

Finalizing Dynamic DataSource priority handling

I’m designing the code to implement priority management for data sources, including an enum, context holder, and adjustments to DynamicDataSource. The approach considers overriding logic for nested method calls based on priority, ensuring correct push/pop operations. I'll ensure annotation resolving and restore behaviors like skipping unnecessary pushes when priorities match.

Implementing AOP for DataSource priority handling

I’m setting up AOP to handle DataSource resolution by field, method, and class, respecting priority. The resolver checks each level, ensuring inner methods override outer methods when priorities match. The aspect ensures correct data source is applied per method or service context. For tests, I’ll provide a JUnit snippet, and maintain compatibility with @Transactional precedence through adjusted pointcut ordering.

Resolving method and field DS conflicts

I believe the user wants method annotations to override field ones in certain cases, but we need to ensure that field annotations always have priority. The most logical approach seems to be applying field-based DS resolution when the method refers to the mapper, but excluding service method annotations unless explicitly needed.

Thought for 6m 20s

Antwort

下面给一套「字段(注入点)> 方法 > 类」完整适配方案,并且把你现在的 ThreadLocal 单值改成 栈(Stack)上下文,解决嵌套调用时“内层 clear 把外层数据源清掉”的隐患,同时实现“高优先级不被低优先级覆盖;同优先级内层可覆盖外层”。


1)新增优先级枚举

java
package com.wiscom.basic.config.datasource; public enum DataSourcePriority { DEFAULT(0), CLASS(1), METHOD(2), FIELD(3); private final int level; DataSourcePriority(int level) { this.level = level; } public int level() { return level; } }

2)新增上下文 Holder(ThreadLocal + Deque)

关键点:

  • 支持嵌套:push / pop 恢复外层
  • 覆盖规则:priority >= 当前priority 时允许覆盖(同优先级也允许内层覆盖外层)
  • 如果候选值与当前完全相同,则不 push(减少开销)
java
package com.wiscom.basic.config.datasource; import java.util.ArrayDeque; import java.util.Deque; import java.util.Objects; public final class DynamicDataSourceContextHolder { private DynamicDataSourceContextHolder() { } private static final ThreadLocal<Deque<Context>> HOLDER = ThreadLocal.withInitial(ArrayDeque::new); public static String peek() { Context ctx = HOLDER.get().peek(); return ctx == null ? null : ctx.name; } public static int peekPriority() { Context ctx = HOLDER.get().peek(); return ctx == null ? Integer.MIN_VALUE : ctx.priority; } /** * 尝试切换数据源: * - 当 priority >= 当前 priority 时允许覆盖(同优先级内层覆盖外层) * - 当候选与当前完全一致时不入栈 * * @return 是否真的发生了 push(发生 push 才需要 pop) */ public static boolean pushIfNeeded(String name, int priority) { String current = peek(); int currentPriority = peekPriority(); if (Objects.equals(name, current) && priority == currentPriority) { return false; } if (priority >= currentPriority) { HOLDER.get().push(new Context(name, priority)); return true; } return false; } public static void pop() { Deque<Context> deque = HOLDER.get(); if (!deque.isEmpty()) { deque.pop(); } if (deque.isEmpty()) { HOLDER.remove(); } } private static final class Context { private final String name; private final int priority; private Context(String name, int priority) { this.name = name; this.priority = priority; } } }

3)改造 DynamicDataSource:使用栈上下文 + setDataSource 返回是否 push

java
package com.wiscom.basic.config.datasource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

/**
 * Routing {@link DataSource} backed by the stack-based
 * {@link DynamicDataSourceContextHolder}, so nested switches restore the
 * outer selection instead of wiping it.
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    /**
     * Wires the default data source plus the switchable targets and triggers
     * Spring's resolution of the target map.
     *
     * @param defaultTargetDataSource data source used when no key is bound
     * @param targetDataSources       map of key -> switchable data source
     */
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        return getDataSource();
    }

    /**
     * Legacy entry point kept for old callers: switches with DEFAULT priority.
     *
     * @return whether a context frame was pushed
     */
    public static boolean setDataSource(String dataSource) {
        return setDataSource(dataSource, DataSourcePriority.DEFAULT.level());
    }

    /**
     * Priority-aware switch.
     *
     * @return whether a context frame was pushed (caller must then clear)
     */
    public static boolean setDataSource(String dataSource, int priority) {
        return DynamicDataSourceContextHolder.pushIfNeeded(dataSource, priority);
    }

    /**
     * @return the currently active data source key, or {@code null} when none
     */
    public static String getDataSource() {
        return DynamicDataSourceContextHolder.peek();
    }

    /** Pops the frame pushed by the matching {@code setDataSource} call. */
    public static void clearDataSource() {
        DynamicDataSourceContextHolder.pop();
    }
}

4)改造 Aspect:字段 > 方法 > 类(并且“字段”适配自动注入 mapper)

实现要点:

  • 字段(FIELD):在当前执行的 Bean(通常是 Service/Dao)里扫描:
    @Autowired/@Resource 且字段类型是 mapper/dao(按包名或注解判断),并且字段上有 @TargetDataSource
    • 若扫描到 多个不同数据源,无法判定该方法用哪个 mapper,安全起见忽略字段规则,降级走方法/类(建议这种场景用方法注解明确指定)
  • 方法(METHOD):优先取“最具体实现方法”(兼容 CGLIB / 继承),再取接口方法
  • 类(CLASS):目标类、其接口、以及声明类型(mapper interface)都查一遍
  • 默认:elss
  • 嵌套恢复:只有当本次真的 push 了,finally 才 pop
java
package com.wiscom.basic.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.aop.support.AopUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.*;

/**
 * Aspect that resolves {@code @TargetDataSource} with priority
 * FIELD &gt; METHOD &gt; CLASS &gt; DEFAULT and pushes the result onto the
 * stack-based dynamic-data-source context.
 *
 * NOTE(review): field-level resolution scans the executing bean's injected
 * mapper/dao fields; it cannot tell which field a given method actually uses,
 * which is why multiple differently-annotated fields cause a safe fallback.
 */
@Component
@Aspect
@Slf4j
@Order(-100)
public class TargetDataSourceAspect {

    /** Fallback data source key used when nothing is annotated. */
    private static final String DEFAULT_DS = "elss";

    /** Pointcut: every method in service/dao/mapper packages under com.wiscom. */
    @Pointcut("execution(* com.wiscom..*.service..*.*(..)) || execution(* com.wiscom..*.dao..*.*(..)) || execution(* com.wiscom..*.mapper..*.*(..))")
    private void dsPointCut() {
    }

    /**
     * Resolves the data source for the intercepted call, pushes it (only if it
     * wins by priority), and pops it afterwards only when a push happened —
     * so nested calls restore the outer selection correctly.
     *
     * @param joinPoint the intercepted invocation
     * @return whatever the intercepted method returns
     * @throws Throwable propagated unchanged from the intercepted method
     */
    @Around("dsPointCut()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        ResolveResult rr = resolveDataSource(joinPoint);
        String ds = (rr != null && StringUtils.hasText(rr.dsName)) ? rr.dsName : DEFAULT_DS;
        int priority = (rr != null) ? rr.priority.level() : DataSourcePriority.DEFAULT.level();
        boolean pushed = DynamicDataSource.setDataSource(ds, priority);
        if (pushed) {
            log.debug("切换数据源 => {} (priority={})", ds, priority);
        } else {
            log.debug("保持数据源 => {} (currentPriority >= {})", DynamicDataSource.getDataSource(), priority);
        }
        try {
            return joinPoint.proceed();
        } finally {
            // Pop only what this invocation pushed; otherwise leave the outer
            // frame untouched.
            if (pushed) {
                DynamicDataSource.clearDataSource();
                log.debug("数据源上下文已恢复/弹栈");
            }
        }
    }

    /**
     * Resolution chain: field (injected mapper) -> method -> class -> default.
     *
     * @param joinPoint the intercepted invocation
     * @return the winning data source and its priority (never null)
     */
    private ResolveResult resolveDataSource(ProceedingJoinPoint joinPoint) {
        MethodSignature ms = (MethodSignature) joinPoint.getSignature();
        Method signatureMethod = ms.getMethod();
        Class<?> declaringType = ms.getDeclaringType();
        Object target = joinPoint.getTarget();
        // Unwrap AOP proxies so we inspect the user class, not the proxy.
        Class<?> targetClass = (target != null) ? AopUtils.getTargetClass(target) : declaringType;
        // 1) Field: auto-injected mapper fields (FIELD, highest priority).
        ResolveResult byField = resolveByAutowiredMapperField(targetClass);
        if (byField != null) {
            return byField;
        }
        // 2) Method: annotation on the concrete mapper/service method (METHOD).
        ResolveResult byMethod = resolveByMethod(signatureMethod, targetClass);
        if (byMethod != null) {
            return byMethod;
        }
        // 3) Class: annotation on the mapper class/interface (CLASS).
        ResolveResult byClass = resolveByClass(targetClass, declaringType);
        if (byClass != null) {
            return byClass;
        }
        // 4) Default.
        return new ResolveResult(DEFAULT_DS, DataSourcePriority.DEFAULT);
    }

    /**
     * Field-level resolution: returns a result only when exactly ONE data
     * source can be determined from annotated injected fields. When several
     * fields name different data sources, we cannot know which mapper the
     * current method uses, so the field rule is skipped to avoid mis-routing.
     *
     * @param targetClass unwrapped class of the executing bean
     * @return the unique field-level match, or null to fall through
     */
    private ResolveResult resolveByAutowiredMapperField(Class<?> targetClass) {
        if (targetClass == null) {
            return null;
        }
        Set<String> dsNames = new LinkedHashSet<>();
        for (Field field : getAllFields(targetClass)) {
            // Only consider dependency-injected fields (@Autowired/@Resource).
            if (!isInjectionField(field)) {
                continue;
            }
            // Only consider mapper/dao-typed fields to avoid touching other injections.
            if (!looksLikeMapperOrDao(field.getType())) {
                continue;
            }
            TargetDataSource ann = AnnotatedElementUtils.findMergedAnnotation(field, TargetDataSource.class);
            if (ann == null || !StringUtils.hasText(ann.name())) {
                continue;
            }
            dsNames.add(ann.name());
        }
        if (dsNames.isEmpty()) {
            return null;
        }
        if (dsNames.size() == 1) {
            String ds = dsNames.iterator().next();
            return new ResolveResult(ds, DataSourcePriority.FIELD);
        }
        // Ambiguous: multiple fields with different data sources -> safe fallback.
        log.warn("发现多个 @TargetDataSource 注入字段且数据源不同:{},本次不使用字段优先级,建议用方法注解明确指定", dsNames);
        return null;
    }

    /**
     * Method-level resolution. Prefers the most specific implementation method
     * (CGLIB / inheritance), then falls back to the interface method so
     * annotations on JDK-proxied interfaces are still seen.
     *
     * @param signatureMethod method from the join point signature
     * @param targetClass     unwrapped target class (may be null)
     * @return the method-level match, or null to fall through
     */
    private ResolveResult resolveByMethod(Method signatureMethod, Class<?> targetClass) {
        if (signatureMethod == null) {
            return null;
        }
        // Most specific method: handles CGLIB subclasses and overridden methods.
        Method specific = (targetClass != null)
                ? AopUtils.getMostSpecificMethod(signatureMethod, targetClass)
                : signatureMethod;
        TargetDataSource ann = AnnotatedElementUtils.findMergedAnnotation(specific, TargetDataSource.class);
        if (ann == null) {
            // JDK proxy case: the annotation may sit on the interface method.
            ann = AnnotatedElementUtils.findMergedAnnotation(signatureMethod, TargetDataSource.class);
        }
        if (ann != null && StringUtils.hasText(ann.name())) {
            return new ResolveResult(ann.name(), DataSourcePriority.METHOD);
        }
        return null;
    }

    /**
     * Class-level resolution: checks the target class, then its interfaces
     * (mappers are usually interfaces), then the declaring type.
     *
     * @param targetClass   unwrapped target class (may be null)
     * @param declaringType type from MethodSignature.getDeclaringType()
     * @return the class-level match, or null to fall through
     */
    private ResolveResult resolveByClass(Class<?> targetClass, Class<?> declaringType) {
        // Target class itself.
        TargetDataSource ann = (targetClass != null)
                ? AnnotatedElementUtils.findMergedAnnotation(targetClass, TargetDataSource.class)
                : null;
        // Interfaces of the target class (mapper is typically an interface).
        if (ann == null && targetClass != null) {
            for (Class<?> itf : targetClass.getInterfaces()) {
                ann = AnnotatedElementUtils.findMergedAnnotation(itf, TargetDataSource.class);
                if (ann != null) {
                    break;
                }
            }
        }
        // Declaring type as reported by the signature.
        if (ann == null && declaringType != null) {
            ann = AnnotatedElementUtils.findMergedAnnotation(declaringType, TargetDataSource.class);
        }
        if (ann != null && StringUtils.hasText(ann.name())) {
            return new ResolveResult(ann.name(), DataSourcePriority.CLASS);
        }
        return null;
    }

    /** @return whether the field is a DI injection point (@Autowired or @Resource) */
    private boolean isInjectionField(Field field) {
        return field.isAnnotationPresent(Autowired.class) || field.isAnnotationPresent(Resource.class);
    }

    /**
     * Heuristic: package name contains ".mapper." or ".dao.". Strengthen per
     * project needs (e.g. also accept types annotated @Mapper/@Repository).
     */
    private boolean looksLikeMapperOrDao(Class<?> type) {
        if (type == null) {
            return false;
        }
        String name = type.getName();
        return name.contains(".mapper.") || name.contains(".dao.");
    }

    /**
     * Collects declared fields of the class and all superclasses (excluding
     * Object), so inherited injected fields are considered too.
     */
    private List<Field> getAllFields(Class<?> type) {
        List<Field> fields = new ArrayList<>();
        Class<?> cur = type;
        while (cur != null && cur != Object.class) {
            fields.addAll(Arrays.asList(cur.getDeclaredFields()));
            cur = cur.getSuperclass();
        }
        return fields;
    }

    /** Value holder pairing the resolved data source name with its priority. */
    private static final class ResolveResult {
        private final String dsName;
        private final DataSourcePriority priority;

        private ResolveResult(String dsName, DataSourcePriority priority) {
            this.dsName = dsName;
            this.priority = priority;
        }
    }
}

5)怎么用(覆盖关系示例)

A. 自动注入 mapper 字段(最高优先级)

java
@Service
public class DemoService {

    /** Field-level annotation: highest priority — calls through this mapper use "envs". */
    @Autowired
    @TargetDataSource(name = "envs")
    private DemoMapper demoMapper;

    public void biz() {
        demoMapper.selectX(); // => envs
    }
}

B. mapper 方法级(字段不存在/无法唯一确定时生效)

java
@Mapper
public interface DemoMapper {

    /** Method-level annotation: applies when no unique field-level annotation exists. */
    @TargetDataSource(name = "auto-dm")
    List<String> selectX();
}

C. mapper 类/接口级(字段、方法都没有时生效)

java
/** Class-level annotation: fallback when neither field nor method is annotated. */
@TargetDataSource(name = "ypm-sync")
@Mapper
public interface DemoMapper {
    List<String> selectX();
}

6)你现在代码里一个“隐患”已经顺手修掉了

你原来是单值 ThreadLocal + 每层 finally clear:

  • service 方法进来 set
  • mapper 方法进来 set
  • mapper finally clear(把 service 的也清掉了)

改成栈(Stack)上下文之后:

  • 内层 pop 只会恢复外层,不会把外层直接 remove 掉。

如果你们确实存在“一个 Service 同时注入多个 mapper 且各自不同数据源”的场景,我建议的做法是:字段注解只作为默认值,具体分支用 方法注解 明确标注(因为仅靠 AOP 无法知道某个 service 方法内部到底会用哪个字段)。我上面的实现也按这个原则做了“多字段不同 ds => 安全降级”。

Diesen Q&A teilen