Compare commits

...

3 Commits

Author SHA1 Message Date
Hanserwei
042a2622d9 子模块打包文件 2025-10-09 09:50:40 +08:00
Hanserwei
28ab543d57 refactor(distributed-id-generator): 重构分布式ID生成器配置与数据源管理
- 引入 LeafProperties 配置类统一管理ID生成器配置
- 添加 LeafDataSourceConfiguration 配置数据源
- 移除旧的 Constants 类和 leaf.properties 文件
- 更新 SegmentService 和 SnowflakeService 初始化逻辑
- 修改 SnowflakeZookeeperHolder 构造函数增加 leafName 参数
- 升级 MyBatis 到 MyBatis-Plus 并更新相关依赖版本
- 使用 Spring Boot 注解替换手动数据源创建
- 优化 DAO 层实现使用 Spring 注解和事务管理
- 更新应用配置文件支持多环境配置
- 添加 caffeine、commons-io、perf4j、curator-recipes 等依赖版本管理
2025-10-09 09:50:13 +08:00
Hanserwei
31b06fe32d fix(id-gen): 重命名分布式id模块名称,与其他模块一致。 2025-10-09 08:52:44 +08:00
52 changed files with 629 additions and 422 deletions

View File

@@ -16,6 +16,10 @@
<module>han-note-gateway</module> <module>han-note-gateway</module>
<module>han-note-oss</module> <module>han-note-oss</module>
<module>han-note-user</module> <module>han-note-user</module>
<module>han-note-kv</module>
<module>han-note-distributed-id-generator</module>
<module>han-note-note</module>
<module>han-note-note/han-note-note-biz</module>
</modules> </modules>
<properties> <properties>
@@ -51,6 +55,11 @@
<jaxb-runtime.version>2.3.3</jaxb-runtime.version> <jaxb-runtime.version>2.3.3</jaxb-runtime.version>
<cos-api.version>5.6.227</cos-api.version> <cos-api.version>5.6.227</cos-api.version>
<feign-form.version>3.8.0</feign-form.version> <feign-form.version>3.8.0</feign-form.version>
<caffeine.version>3.2.2</caffeine.version>
<common-io.version>2.20.0</common-io.version>
<perf4j.version>0.9.16</perf4j.version>
<curator-recipes.version>5.9.0</curator-recipes.version>
<zookeeper.version>3.9.4</zookeeper.version>
</properties> </properties>
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
@@ -215,12 +224,47 @@
<artifactId>feign-form</artifactId> <artifactId>feign-form</artifactId>
<version>${feign-form.version}</version> <version>${feign-form.version}</version>
</dependency> </dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${common-io.version}</version>
</dependency>
<dependency>
<groupId>org.perf4j</groupId>
<artifactId>perf4j</artifactId>
<version>${perf4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator-recipes.version}</version>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>${zookeeper.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.hanserwei</groupId> <groupId>com.hanserwei</groupId>
<artifactId>han-note-user-api</artifactId> <artifactId>han-note-user-api</artifactId>
<version>0.0.1-SNAPSHOT</version> <version>0.0.1-SNAPSHOT</version>
</dependency> </dependency>
<dependency>
<groupId>com.hanserwei</groupId>
<artifactId>han-note-kv-api</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.hanserwei</groupId>
<artifactId>han-note-distributed-id-generator-api</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
<!-- caffeine本地缓存-->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>${caffeine.version}</version>
</dependency>
</dependencies> </dependencies>
</dependencyManagement> </dependencyManagement>

4
.idea/encodings.xml generated
View File

@@ -5,8 +5,8 @@
<file url="file://$PROJECT_DIR$/han-note-auth/src/main/resources" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-auth/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/han-note-distributed-id-generator-api/src/main/java" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/han-note-distributed-id-generator-api/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/han-note-distributed-id-generator-api/src/main/resources" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/han-note-distributed-id-generator-api/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/hannote-distributed-id-generator-biz/src/main/java" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/han-note-distributed-id-generator-biz/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/hannote-distributed-id-generator-biz/src/main/resources" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/han-note-distributed-id-generator-biz/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/src/main/java" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/src/main/java" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/src/main/resources" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-distributed-id-generator/src/main/resources" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/han-note-gateway/src/main/java" charset="UTF-8" /> <file url="file://$PROJECT_DIR$/han-note-gateway/src/main/java" charset="UTF-8" />

View File

@@ -0,0 +1,27 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<!-- 指定父项目 -->
<parent>
<groupId>com.hanserwei</groupId>
<artifactId>han-note</artifactId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<!-- 多模块项目需要配置打包方式为 pom -->
<packaging>pom</packaging>
<!-- 子模块管理 -->
<modules>
<module>han-note-distributed-id-generator-api</module>
<module>han-note-distributed-id-generator-biz</module>
</modules>
<artifactId>han-note-distributed-id-generator</artifactId>
<!-- 项目名称 -->
<name>${project.artifactId}</name>
<!-- 项目描述 -->
<description>分布式 ID 生成服务</description>
</project>

View File

@@ -0,0 +1,124 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<!-- 指定父项目 -->
<parent>
<groupId>com.hanserwei</groupId>
<artifactId>han-note-distributed-id-generator</artifactId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<!-- 打包方式 -->
<packaging>jar</packaging>
<artifactId>han-note-distributed-id-generator-biz</artifactId>
<name>${project.artifactId}</name>
<description>分布式 ID 生成业务层</description>
<dependencies>
<dependency>
<groupId>com.hanserwei</groupId>
<artifactId>hanserwei-common</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-bootstrap</artifactId>
</dependency>
<!-- 服务发现 -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-spring-boot3-starter</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-3-starter</artifactId>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.perf4j</groupId>
<artifactId>perf4j</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<!-- 为防止日志冲突,添加以下排除项 -->
<exclusions>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
<exclusion>
<artifactId>org.slf4j</artifactId>
<groupId>slf4j-reload4j</groupId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<!-- 为防止日志冲突,添加以下排除项 -->
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@@ -11,17 +11,9 @@
<!-- 打包方式 --> <!-- 打包方式 -->
<packaging>jar</packaging> <packaging>jar</packaging>
<artifactId>hannote-distributed-id-generator-biz</artifactId> <artifactId>han-note-distributed-id-generator-biz</artifactId>
<name>${project.artifactId}</name> <name>${project.artifactId}</name>
<description>分布式 ID 生成业务层</description> <description>分布式 ID 生成业务层</description>
<properties>
<common-io.version>2.4</common-io.version>
<perf4j.version>0.9.16</perf4j.version>
<druid.version>1.0.18</druid.version>
<mybatis.version>3.3.0</mybatis.version>
<curator-recipes.version>2.6.0</curator-recipes.version>
<zookeeper.version>3.6.0</zookeeper.version>
</properties>
<dependencies> <dependencies>
<dependency> <dependency>
@@ -50,39 +42,35 @@
<groupId>com.alibaba.cloud</groupId> <groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId> <artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency> </dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-spring-boot3-starter</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-3-starter</artifactId>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<dependency> <dependency>
<groupId>commons-io</groupId> <groupId>commons-io</groupId>
<artifactId>commons-io</artifactId> <artifactId>commons-io</artifactId>
<version>${common-io.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.perf4j</groupId> <groupId>org.perf4j</groupId>
<artifactId>perf4j</artifactId> <artifactId>perf4j</artifactId>
<version>${perf4j.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.29</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis</artifactId>
<version>${mybatis.version}</version>
</dependency>
<!-- zk -->
<dependency> <dependency>
<groupId>org.apache.curator</groupId> <groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId> <artifactId>curator-recipes</artifactId>
<version>${curator-recipes.version}</version>
<!-- 为防止日志冲突,添加以下排除项 --> <!-- 为防止日志冲突,添加以下排除项 -->
<exclusions> <exclusions>
<exclusion> <exclusion>
@@ -103,10 +91,10 @@
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.zookeeper</groupId> <groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId> <artifactId>zookeeper</artifactId>
<version>${zookeeper.version}</version>
<!-- 为防止日志冲突,添加以下排除项 --> <!-- 为防止日志冲突,添加以下排除项 -->
<exclusions> <exclusions>
<exclusion> <exclusion>

View File

@@ -0,0 +1,17 @@
package com.hanserwei.hannote.distributed.id.generator.biz;
import com.hanserwei.hannote.distributed.id.generator.biz.config.LeafProperties;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication
@EnableConfigurationProperties(LeafProperties.class)
@MapperScan(basePackageClasses = IDAllocMapper.class)
public class HannoteDistributedIdGeneratorBizApplication {
public static void main(String[] args) {
SpringApplication.run(HannoteDistributedIdGeneratorBizApplication.class, args);
}
}

View File

@@ -0,0 +1,43 @@
package com.hanserwei.hannote.distributed.id.generator.biz.config;
import com.alibaba.druid.spring.boot3.autoconfigure.DruidDataSourceBuilder;
import jakarta.annotation.PostConstruct;
import javax.sql.DataSource;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
@Slf4j
@Configuration
@RequiredArgsConstructor
public class LeafDataSourceConfiguration {
private final LeafProperties leafProperties;
@PostConstruct
void logDataSourceSource() {
LeafProperties.Jdbc jdbc = leafProperties.getJdbc();
if (StringUtils.hasText(jdbc.getUrl())) {
log.info("Leaf JDBC properties detected, will configure DruidDataSource via leaf.jdbc.*");
} else {
log.info("Leaf JDBC properties not set, relying on default spring.datasource configuration");
}
}
@Bean
@ConditionalOnMissingBean(DataSource.class)
@ConditionalOnProperty(prefix = "leaf.jdbc", name = "url")
public DataSource leafDataSource() {
LeafProperties.Jdbc jdbc = leafProperties.getJdbc();
var dataSource = DruidDataSourceBuilder.create().build();
dataSource.setUrl(jdbc.getUrl());
dataSource.setUsername(jdbc.getUsername());
dataSource.setPassword(jdbc.getPassword());
dataSource.setDriverClassName(jdbc.getDriverClassName());
return dataSource;
}
}

View File

@@ -0,0 +1,72 @@
package com.hanserwei.hannote.distributed.id.generator.biz.config;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
@Getter
@Setter
@ConfigurationProperties(prefix = "leaf")
public class LeafProperties {
/**
* 用于区分不同集群的唯一名称,影响 Snowflake 的 zk 节点路径。
*/
private String name = "leaf";
private final Segment segment = new Segment();
private final Snowflake snowflake = new Snowflake();
private final Jdbc jdbc = new Jdbc();
@Getter
@Setter
public static class Segment {
/**
* 是否启用号段模式 ID 生成。
*/
private boolean enable = true;
}
@Getter
@Setter
public static class Snowflake {
/**
* 是否启用 Snowflake 模式 ID 生成。
*/
private boolean enable = true;
/**
* Zookeeper 连接地址,示例:127.0.0.1:2181。
*/
private String zkAddress;
/**
* Snowflake 服务监听端口。
*/
private int port = 0;
}
@Getter
@Setter
public static class Jdbc {
/**
* JDBC 驱动类名。
*/
private String driverClassName = "com.mysql.cj.jdbc.Driver";
/**
* 数据库连接 URL。
*/
private String url;
/**
* 数据库用户名。
*/
private String username;
/**
* 数据库密码。
*/
private String password;
}
}

View File

@@ -1,5 +1,10 @@
package com.hanserwei.hannote.distributed.id.generator.biz.core.common; package com.hanserwei.hannote.distributed.id.generator.biz.core.common;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
public class Result { public class Result {
private long id; private long id;
private Status status; private Status status;
@@ -12,22 +17,6 @@ public class Result {
this.status = status; this.status = status;
} }
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
@Override @Override
public String toString() { public String toString() {
final StringBuilder sb = new StringBuilder("Result{"); final StringBuilder sb = new StringBuilder("Result{");

View File

@@ -19,7 +19,7 @@ public class Utils {
try { try {
List<String> ipList = getHostAddress(null); List<String> ipList = getHostAddress(null);
// default the first // default the first
ip = (!ipList.isEmpty()) ? ipList.get(0) : ""; ip = (!ipList.isEmpty()) ? ipList.getFirst() : "";
} catch (Exception ex) { } catch (Exception ex) {
ip = ""; ip = "";
logger.warn("Utils get IP warn", ex); logger.warn("Utils get IP warn", ex);
@@ -32,7 +32,7 @@ public class Utils {
interfaceName = interfaceName.trim(); interfaceName = interfaceName.trim();
try { try {
List<String> ipList = getHostAddress(interfaceName); List<String> ipList = getHostAddress(interfaceName);
ip = (!ipList.isEmpty()) ? ipList.get(0) : ""; ip = (!ipList.isEmpty()) ? ipList.getFirst() : "";
} catch (Exception ex) { } catch (Exception ex) {
ip = ""; ip = "";
logger.warn("Utils get IP warn", ex); logger.warn("Utils get IP warn", ex);

View File

@@ -1,10 +1,16 @@
package com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao; package com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.model.LeafAlloc; import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.model.LeafAlloc;
import org.apache.ibatis.annotations.*; import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Result;
import org.apache.ibatis.annotations.Results;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List; import java.util.List;
@Mapper
public interface IDAllocMapper { public interface IDAllocMapper {
@Select("SELECT biz_tag, max_id, step, update_time FROM leaf_alloc") @Select("SELECT biz_tag, max_id, step, update_time FROM leaf_alloc")
@@ -27,7 +33,7 @@ public interface IDAllocMapper {
@Update("UPDATE leaf_alloc SET max_id = max_id + step WHERE biz_tag = #{tag}") @Update("UPDATE leaf_alloc SET max_id = max_id + step WHERE biz_tag = #{tag}")
void updateMaxId(@Param("tag") String tag); void updateMaxId(@Param("tag") String tag);
@Update("UPDATE leaf_alloc SET max_id = max_id + #{step} WHERE biz_tag = #{key}") @Update("UPDATE leaf_alloc SET max_id = max_id + #{leafAlloc.step} WHERE biz_tag = #{leafAlloc.key}")
void updateMaxIdByCustomStep(@Param("leafAlloc") LeafAlloc leafAlloc); void updateMaxIdByCustomStep(@Param("leafAlloc") LeafAlloc leafAlloc);
@Select("SELECT biz_tag FROM leaf_alloc") @Select("SELECT biz_tag FROM leaf_alloc")

View File

@@ -0,0 +1,50 @@
package com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.impl;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocDao;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.model.LeafAlloc;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Slf4j
@Primary
@Repository
@RequiredArgsConstructor
public class IDAllocDaoImpl implements IDAllocDao {
private final IDAllocMapper idAllocMapper;
@PostConstruct
void logInit() {
log.info("IDAllocDaoImpl initialized as primary IDAllocDao implementation");
}
@Override
public List<LeafAlloc> getAllLeafAllocs() {
return idAllocMapper.getAllLeafAllocs();
}
@Override
@Transactional(rollbackFor = Exception.class)
public LeafAlloc updateMaxIdAndGetLeafAlloc(String tag) {
idAllocMapper.updateMaxId(tag);
return idAllocMapper.getLeafAlloc(tag);
}
@Override
@Transactional(rollbackFor = Exception.class)
public LeafAlloc updateMaxIdByCustomStepAndGetLeafAlloc(LeafAlloc leafAlloc) {
idAllocMapper.updateMaxIdByCustomStep(leafAlloc);
return idAllocMapper.getLeafAlloc(leafAlloc.getKey());
}
@Override
public List<String> getAllTags() {
return idAllocMapper.getAllTags();
}
}

View File

@@ -31,22 +31,23 @@ public class SnowflakeIDGenImpl implements IDGen {
private long lastTimestamp = -1L; private long lastTimestamp = -1L;
private static final Random RANDOM = new Random(); private static final Random RANDOM = new Random();
public SnowflakeIDGenImpl(String zkAddress, int port) { public SnowflakeIDGenImpl(String leafName, String zkAddress, int port) {
//Thu Nov 04 2010 09:42:54 GMT+0800 (中国标准时间) //Thu Nov 04 2010 09:42:54 GMT+0800 (中国标准时间)
this(zkAddress, port, 1288834974657L); this(leafName, zkAddress, port, 1288834974657L);
} }
/** /**
* @param leafName 区分集群的唯一名称
* @param zkAddress zk地址 * @param zkAddress zk地址
* @param port snowflake监听端口 * @param port snowflake监听端口
* @param twepoch 起始的时间戳 * @param twepoch 起始的时间戳
*/ */
public SnowflakeIDGenImpl(String zkAddress, int port, long twepoch) { public SnowflakeIDGenImpl(String leafName, String zkAddress, int port, long twepoch) {
this.twepoch = twepoch; this.twepoch = twepoch;
Preconditions.checkArgument(timeGen() > twepoch, "Snowflake not support twepoch gt currentTime"); Preconditions.checkArgument(timeGen() > twepoch, "Snowflake not support twepoch gt currentTime");
final String ip = Utils.getIp(); final String ip = Utils.getIp();
SnowflakeZookeeperHolder holder = new SnowflakeZookeeperHolder(ip, String.valueOf(port), zkAddress); SnowflakeZookeeperHolder holder = new SnowflakeZookeeperHolder(leafName, ip, String.valueOf(port), zkAddress);
LOGGER.info("twepoch:{} ,ip:{} ,zkAddress:{} port:{}", twepoch, ip, zkAddress, port); LOGGER.info("twepoch:{} ,leafName:{} ,ip:{} ,zkAddress:{} port:{}", twepoch, leafName, ip, zkAddress, port);
boolean initFlag = holder.init(); boolean initFlag = holder.init();
if (initFlag) { if (initFlag) {
workerId = holder.getWorkerID(); workerId = holder.getWorkerID();

View File

@@ -3,7 +3,6 @@ package com.hanserwei.hannote.distributed.id.generator.biz.core.snowflake;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.PropertyFactory;
import com.hanserwei.hannote.distributed.id.generator.biz.core.snowflake.exception.CheckLastTimeException; import com.hanserwei.hannote.distributed.id.generator.biz.core.snowflake.exception.CheckLastTimeException;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.curator.RetryPolicy; import org.apache.curator.RetryPolicy;
@@ -27,18 +26,22 @@ import java.util.concurrent.TimeUnit;
public class SnowflakeZookeeperHolder { public class SnowflakeZookeeperHolder {
private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeZookeeperHolder.class); private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeZookeeperHolder.class);
private final String leafName;
private final String pathForever;//保存所有数据持久的节点
private final String propPath;
private String zk_AddressNode = null;//保存自身的key ip:port-000000001 private String zk_AddressNode = null;//保存自身的key ip:port-000000001
private String listenAddress = null;//保存自身的key ip:port private String listenAddress = null;//保存自身的key ip:port
private int workerID; private int workerID;
private static final String PREFIX_ZK_PATH = "/snowflake/" + PropertyFactory.getProperties().getProperty("leaf.name");
private static final String PROP_PATH = System.getProperty("java.io.tmpdir") + File.separator + PropertyFactory.getProperties().getProperty("leaf.name") + "/leafconf/{port}/workerID.properties";
private static final String PATH_FOREVER = PREFIX_ZK_PATH + "/forever";//保存所有数据持久的节点
private String ip; private String ip;
private String port; private String port;
private String connectionString; private String connectionString;
private long lastUpdateTime; private long lastUpdateTime;
public SnowflakeZookeeperHolder(String ip, String port, String connectionString) { public SnowflakeZookeeperHolder(String leafName, String ip, String port, String connectionString) {
this.leafName = leafName;
String prefixZkPath = "/snowflake/" + leafName;
this.pathForever = prefixZkPath + "/forever";
this.propPath = System.getProperty("java.io.tmpdir") + File.separator + leafName + "/leafconf/{port}/workerID.properties";
this.ip = ip; this.ip = ip;
this.port = port; this.port = port;
this.listenAddress = ip + ":" + port; this.listenAddress = ip + ":" + port;
@@ -47,22 +50,23 @@ public class SnowflakeZookeeperHolder {
public boolean init() { public boolean init() {
try { try {
LOGGER.info("Initializing SnowflakeZookeeperHolder for leafName={}, listenAddress={}, zkAddress={}", leafName, listenAddress, connectionString);
CuratorFramework curator = createWithOptions(connectionString, new RetryUntilElapsed(1000, 4), 10000, 6000); CuratorFramework curator = createWithOptions(connectionString, new RetryUntilElapsed(1000, 4), 10000, 6000);
curator.start(); curator.start();
Stat stat = curator.checkExists().forPath(PATH_FOREVER); Stat stat = curator.checkExists().forPath(pathForever);
if (stat == null) { if (stat == null) {
//不存在根节点,机器第一次启动,创建/snowflake/ip:port-000000000,并上传数据 //不存在根节点,机器第一次启动,创建/snowflake/ip:port-000000000,并上传数据
zk_AddressNode = createNode(curator); zk_AddressNode = createNode(curator);
//worker id 默认是0 //worker id 默认是0
updateLocalWorkerID(workerID); updateLocalWorkerID(workerID);
//定时上报本机时间给forever节点 //定时上报本机时间给forever节点
ScheduledUploadData(curator, zk_AddressNode); scheduleUploadData(curator, zk_AddressNode);
return true; return true;
} else { } else {
Map<String, Integer> nodeMap = Maps.newHashMap();//ip:port->00001 Map<String, Integer> nodeMap = Maps.newHashMap();//ip:port->00001
Map<String, String> realNode = Maps.newHashMap();//ip:port->(ipport-000001) Map<String, String> realNode = Maps.newHashMap();//ip:port->(ipport-000001)
//存在根节点,先检查是否有属于自己的根节点 //存在根节点,先检查是否有属于自己的根节点
List<String> keys = curator.getChildren().forPath(PATH_FOREVER); List<String> keys = curator.getChildren().forPath(pathForever);
for (String key : keys) { for (String key : keys) {
String[] nodeKey = key.split("-"); String[] nodeKey = key.split("-");
realNode.put(nodeKey[0], key); realNode.put(nodeKey[0], key);
@@ -71,7 +75,7 @@ public class SnowflakeZookeeperHolder {
Integer workerid = nodeMap.get(listenAddress); Integer workerid = nodeMap.get(listenAddress);
if (workerid != null) { if (workerid != null) {
//有自己的节点,zk_AddressNode=ip:port //有自己的节点,zk_AddressNode=ip:port
zk_AddressNode = PATH_FOREVER + "/" + realNode.get(listenAddress); zk_AddressNode = pathForever + "/" + realNode.get(listenAddress);
workerID = workerid;//启动worder时使用会使用 workerID = workerid;//启动worder时使用会使用
if (!checkInitTimeStamp(curator, zk_AddressNode)) { if (!checkInitTimeStamp(curator, zk_AddressNode)) {
throw new CheckLastTimeException("init timestamp check error,forever node timestamp gt this node time"); throw new CheckLastTimeException("init timestamp check error,forever node timestamp gt this node time");
@@ -95,7 +99,7 @@ public class SnowflakeZookeeperHolder {
LOGGER.error("Start node ERROR {}", e); LOGGER.error("Start node ERROR {}", e);
try { try {
Properties properties = new Properties(); Properties properties = new Properties();
properties.load(new FileInputStream(new File(PROP_PATH.replace("{port}", port + "")))); properties.load(new FileInputStream(new File(propPath.replace("{port}", port + ""))));
workerID = Integer.valueOf(properties.getProperty("workerID")); workerID = Integer.valueOf(properties.getProperty("workerID"));
LOGGER.warn("START FAILED ,use local node file properties workerID-{}", workerID); LOGGER.warn("START FAILED ,use local node file properties workerID-{}", workerID);
} catch (Exception e1) { } catch (Exception e1) {
@@ -107,10 +111,10 @@ public class SnowflakeZookeeperHolder {
} }
private void doService(CuratorFramework curator) { private void doService(CuratorFramework curator) {
ScheduledUploadData(curator, zk_AddressNode);// /snowflake_forever/ip:port-000000001 scheduleUploadData(curator, zk_AddressNode);// /snowflake_forever/ip:port-000000001
} }
private void ScheduledUploadData(final CuratorFramework curator, final String zk_AddressNode) { private void scheduleUploadData(final CuratorFramework curator, final String zk_AddressNode) {
Executors.newSingleThreadScheduledExecutor(new ThreadFactory() { Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
@Override @Override
public Thread newThread(Runnable r) { public Thread newThread(Runnable r) {
@@ -143,7 +147,7 @@ public class SnowflakeZookeeperHolder {
*/ */
private String createNode(CuratorFramework curator) throws Exception { private String createNode(CuratorFramework curator) throws Exception {
try { try {
return curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT_SEQUENTIAL).forPath(PATH_FOREVER + "/" + listenAddress + "-", buildData().getBytes()); return curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT_SEQUENTIAL).forPath(pathForever + "/" + listenAddress + "-", buildData().getBytes());
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("create node error msg {} ", e.getMessage()); LOGGER.error("create node error msg {} ", e.getMessage());
throw e; throw e;
@@ -186,7 +190,7 @@ public class SnowflakeZookeeperHolder {
* @param workerID * @param workerID
*/ */
private void updateLocalWorkerID(int workerID) { private void updateLocalWorkerID(int workerID) {
File leafConfFile = new File(PROP_PATH.replace("{port}", port)); File leafConfFile = new File(propPath.replace("{port}", port));
boolean exists = leafConfFile.exists(); boolean exists = leafConfFile.exists();
LOGGER.info("file exists status is {}", exists); LOGGER.info("file exists status is {}", exists);
if (exists) { if (exists) {

View File

@@ -0,0 +1,21 @@
package com.hanserwei.hannote.distributed.id.generator.biz.model;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
public class SegmentBufferView {
private String key;
private long value0;
private int step0;
private long max0;
private long value1;
private int step1;
private long max1;
private int pos;
private boolean nextReady;
private boolean initOk;
}

View File

@@ -0,0 +1,51 @@
package com.hanserwei.hannote.distributed.id.generator.biz.service;

import com.hanserwei.hannote.distributed.id.generator.biz.config.LeafProperties;
import com.hanserwei.hannote.distributed.id.generator.biz.core.IDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.Result;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.ZeroIDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.SegmentIDGenImpl;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocDao;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Segment-mode ID generation service.
 * <p>
 * On startup selects the generator implementation from {@link LeafProperties}:
 * a database-backed {@link SegmentIDGenImpl} when segment mode is enabled,
 * otherwise a {@link ZeroIDGen} placeholder that always yields 0.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class SegmentService {

    private final LeafProperties leafProperties;
    private final IDAllocDao idAllocDao;

    // Chosen at init(); null until the bean lifecycle has run.
    private IDGen idGen;

    /**
     * Initializes the underlying generator once dependencies are injected.
     *
     * @throws IllegalStateException if the segment generator fails to initialize
     */
    @PostConstruct
    public void init() {
        if (!leafProperties.getSegment().isEnable()) {
            // Segment mode switched off: fall back to the zero generator.
            this.idGen = new ZeroIDGen();
            log.info("Segment Service disabled, Zero ID Gen Service Init Successfully");
            return;
        }
        SegmentIDGenImpl generator = new SegmentIDGenImpl();
        generator.setDao(idAllocDao);
        if (!generator.init()) {
            throw new IllegalStateException("Segment Service Init Fail");
        }
        this.idGen = generator;
        log.info("Segment Service Init Successfully");
    }

    /**
     * Issues the next ID for the given biz key.
     *
     * @param key biz tag to allocate an ID for
     * @return generation result from the active generator
     * @throws IllegalStateException if called before initialization completed
     */
    public Result getId(String key) {
        IDGen generator = this.idGen;
        if (generator == null) {
            throw new IllegalStateException("Segment Service not initialized");
        }
        return generator.get(key);
    }

    /**
     * Returns the segment generator, or {@code null} when running in
     * disabled (zero) mode.
     */
    public SegmentIDGenImpl getIdGen() {
        if (idGen instanceof SegmentIDGenImpl) {
            return (SegmentIDGenImpl) idGen;
        }
        return null;
    }
}

View File

@@ -0,0 +1,53 @@
package com.hanserwei.hannote.distributed.id.generator.biz.service;

import com.hanserwei.hannote.distributed.id.generator.biz.config.LeafProperties;
import com.hanserwei.hannote.distributed.id.generator.biz.core.IDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.Result;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.ZeroIDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.snowflake.SnowflakeIDGenImpl;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

/**
 * Snowflake-mode ID generation service.
 * <p>
 * On startup selects the generator implementation from {@link LeafProperties}:
 * a ZooKeeper-coordinated {@link SnowflakeIDGenImpl} when snowflake mode is
 * enabled, otherwise a {@link ZeroIDGen} placeholder that always yields 0.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class SnowflakeService {

    private final LeafProperties leafProperties;

    // Chosen at init(); null until the bean lifecycle has run.
    private IDGen idGen;

    /**
     * Initializes the underlying generator once properties are bound.
     *
     * @throws IllegalStateException if configuration is invalid or the
     *                               snowflake generator fails to initialize
     */
    @PostConstruct
    public void init() {
        if (!leafProperties.getSnowflake().isEnable()) {
            // Snowflake mode switched off: fall back to the zero generator.
            this.idGen = new ZeroIDGen();
            log.info("Snowflake Service disabled, Zero ID Gen Service Init Successfully");
            return;
        }
        this.idGen = buildSnowflakeGenerator();
    }

    // Validates the snowflake configuration and constructs the generator.
    private IDGen buildSnowflakeGenerator() {
        var snowflakeProps = leafProperties.getSnowflake();
        String zkAddress = snowflakeProps.getZkAddress();
        if (!StringUtils.hasText(zkAddress)) {
            throw new IllegalStateException("Snowflake Service Init Fail: zk address is required");
        }
        int port = snowflakeProps.getPort();
        if (port <= 0) {
            throw new IllegalStateException("Snowflake Service Init Fail: port must be positive");
        }
        SnowflakeIDGenImpl generator = new SnowflakeIDGenImpl(leafProperties.getName(), zkAddress, port);
        if (!generator.init()) {
            throw new IllegalStateException("Snowflake Service Init Fail");
        }
        log.info("Snowflake Service Init Successfully with zkAddress={} and port={}", zkAddress, port);
        return generator;
    }

    /**
     * Issues the next ID for the given biz key.
     *
     * @param key biz tag to allocate an ID for
     * @return generation result from the active generator
     * @throws IllegalStateException if called before initialization completed
     */
    public Result getId(String key) {
        IDGen generator = this.idGen;
        if (generator == null) {
            throw new IllegalStateException("Snowflake Service not initialized");
        }
        return generator.get(key);
    }
}

View File

@@ -0,0 +1,6 @@
spring:
datasource:
driver-class-name: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3306/leaf?useUnicode=true&characterEncoding=utf-8&autoReconnect=true&useSSL=false&serverTimezone=Asia/Shanghai
username: root
password: mysql

View File

@@ -0,0 +1,11 @@
spring:
datasource:
driver-class-name: com.mysql.cj.jdbc.Driver
url: ${LEAF_DB_URL:jdbc:mysql://db-host:3306/leaf?useUnicode=true&characterEncoding=utf-8&serverTimezone=Asia/Shanghai}
username: ${LEAF_DB_USERNAME:leaf_user}
password: ${LEAF_DB_PASSWORD:leaf_password}
leaf:
snowflake:
zk-address: "${LEAF_SNOWFLAKE_ZK_ADDRESS:zk.example.com:2181}"
port: ${LEAF_SNOWFLAKE_PORT:2222}

View File

@@ -0,0 +1,25 @@
server:
port: 8085 # 项目启动的端口
spring:
profiles:
active: dev # 默认激活 dev 本地开发环境
datasource:
driver-class-name: ${LEAF_JDBC_DRIVER_CLASS_NAME:com.mysql.cj.jdbc.Driver}
url: ${LEAF_JDBC_URL:jdbc:mysql://127.0.0.1:3306/leaf?useUnicode=true&characterEncoding=utf-8&autoReconnect=true&useSSL=false&serverTimezone=Asia/Shanghai}
username: ${LEAF_JDBC_USERNAME:root}
password: ${LEAF_JDBC_PASSWORD:mysql}
leaf:
name: ${LEAF_NAME:han-note-leaf}
jdbc:
driver-class-name: ${LEAF_JDBC_DRIVER_CLASS_NAME:com.mysql.cj.jdbc.Driver}
url: ${LEAF_JDBC_URL:jdbc:mysql://127.0.0.1:3306/leaf?useUnicode=true&characterEncoding=utf-8&autoReconnect=true&useSSL=false&serverTimezone=Asia/Shanghai}
username: ${LEAF_JDBC_USERNAME:root}
password: ${LEAF_JDBC_PASSWORD:mysql}
segment:
enable: ${LEAF_SEGMENT_ENABLE:true}
snowflake:
enable: ${LEAF_SNOWFLAKE_ENABLE:true}
zk-address: "${LEAF_SNOWFLAKE_ZK_ADDRESS:127.0.0.1:2181}"
port: ${LEAF_SNOWFLAKE_PORT:2222}

View File

@@ -1,11 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class HannoteDistributedIdGeneratorBizApplication {
public static void main(String[] args) {
SpringApplication.run(HannoteDistributedIdGeneratorBizApplication.class, args);
}
}

View File

@@ -1,12 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz.constant;
public class Constants {
public static final String LEAF_SEGMENT_ENABLE = "leaf.segment.enable";
public static final String LEAF_JDBC_URL = "leaf.jdbc.url";
public static final String LEAF_JDBC_USERNAME = "leaf.jdbc.username";
public static final String LEAF_JDBC_PASSWORD = "leaf.jdbc.password";
public static final String LEAF_SNOWFLAKE_ENABLE = "leaf.snowflake.enable";
public static final String LEAF_SNOWFLAKE_PORT = "leaf.snowflake.port";
public static final String LEAF_SNOWFLAKE_ZK_ADDRESS = "leaf.snowflake.zk.address";
}

View File

@@ -1,22 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz.core.common;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Properties;
public class PropertyFactory {
private static final Logger logger = LoggerFactory.getLogger(PropertyFactory.class);
private static final Properties prop = new Properties();
static {
try {
prop.load(PropertyFactory.class.getClassLoader().getResourceAsStream("leaf.properties"));
} catch (IOException e) {
logger.warn("Load Properties Ex", e);
}
}
public static Properties getProperties() {
return prop;
}
}

View File

@@ -1,75 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.impl;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocDao;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.model.LeafAlloc;
import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.ibatis.transaction.TransactionFactory;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;
import javax.sql.DataSource;
import java.util.List;
public class IDAllocDaoImpl implements IDAllocDao {
SqlSessionFactory sqlSessionFactory;
public IDAllocDaoImpl(DataSource dataSource) {
TransactionFactory transactionFactory = new JdbcTransactionFactory();
Environment environment = new Environment("development", transactionFactory, dataSource);
Configuration configuration = new Configuration(environment);
configuration.addMapper(IDAllocMapper.class);
sqlSessionFactory = new SqlSessionFactoryBuilder().build(configuration);
}
@Override
public List<LeafAlloc> getAllLeafAllocs() {
SqlSession sqlSession = sqlSessionFactory.openSession(false);
try {
return sqlSession.selectList("com.hanserwei.hannote.segment.dao.IDAllocMapper.getAllLeafAllocs");
} finally {
sqlSession.close();
}
}
@Override
public LeafAlloc updateMaxIdAndGetLeafAlloc(String tag) {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
sqlSession.update("com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper.updateMaxId", tag);
LeafAlloc result = sqlSession.selectOne("com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper.getLeafAlloc", tag);
sqlSession.commit();
return result;
} finally {
sqlSession.close();
}
}
@Override
public LeafAlloc updateMaxIdByCustomStepAndGetLeafAlloc(LeafAlloc leafAlloc) {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
sqlSession.update("com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper.updateMaxIdByCustomStep", leafAlloc);
LeafAlloc result = sqlSession.selectOne("com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper.getLeafAlloc", leafAlloc.getKey());
sqlSession.commit();
return result;
} finally {
sqlSession.close();
}
}
@Override
public List<String> getAllTags() {
SqlSession sqlSession = sqlSessionFactory.openSession(false);
try {
return sqlSession.selectList("com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocMapper.getAllTags");
} finally {
sqlSession.close();
}
}
}

View File

@@ -1,95 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz.model;
public class SegmentBufferView {
private String key;
private long value0;
private int step0;
private long max0;
private long value1;
private int step1;
private long max1;
private int pos;
private boolean nextReady;
private boolean initOk;
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public long getValue1() {
return value1;
}
public void setValue1(long value1) {
this.value1 = value1;
}
public int getStep1() {
return step1;
}
public void setStep1(int step1) {
this.step1 = step1;
}
public long getMax1() {
return max1;
}
public void setMax1(long max1) {
this.max1 = max1;
}
public long getValue0() {
return value0;
}
public void setValue0(long value0) {
this.value0 = value0;
}
public int getStep0() {
return step0;
}
public void setStep0(int step0) {
this.step0 = step0;
}
public long getMax0() {
return max0;
}
public void setMax0(long max0) {
this.max0 = max0;
}
public int getPos() {
return pos;
}
public void setPos(int pos) {
this.pos = pos;
}
public boolean isNextReady() {
return nextReady;
}
public void setNextReady(boolean nextReady) {
this.nextReady = nextReady;
}
public boolean isInitOk() {
return initOk;
}
public void setInitOk(boolean initOk) {
this.initOk = initOk;
}
}

View File

@@ -1,67 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz.service;
import com.alibaba.druid.pool.DruidDataSource;
import com.hanserwei.hannote.distributed.id.generator.biz.constant.Constants;
import com.hanserwei.hannote.distributed.id.generator.biz.core.IDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.PropertyFactory;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.Result;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.ZeroIDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.SegmentIDGenImpl;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.IDAllocDao;
import com.hanserwei.hannote.distributed.id.generator.biz.core.segment.dao.impl.IDAllocDaoImpl;
import com.hanserwei.hannote.distributed.id.generator.biz.exception.InitException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.sql.SQLException;
import java.util.Properties;
@Service("SegmentService")
public class SegmentService {
private Logger logger = LoggerFactory.getLogger(SegmentService.class);
private IDGen idGen;
private DruidDataSource dataSource;
public SegmentService() throws SQLException, InitException {
Properties properties = PropertyFactory.getProperties();
boolean flag = Boolean.parseBoolean(properties.getProperty(Constants.LEAF_SEGMENT_ENABLE, "true"));
if (flag) {
// Config dataSource
dataSource = new DruidDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(properties.getProperty(Constants.LEAF_JDBC_URL));
dataSource.setUsername(properties.getProperty(Constants.LEAF_JDBC_USERNAME));
dataSource.setPassword(properties.getProperty(Constants.LEAF_JDBC_PASSWORD));
dataSource.setValidationQuery("select 1");
dataSource.init();
// Config Dao
IDAllocDao dao = new IDAllocDaoImpl(dataSource);
// Config ID Gen
idGen = new SegmentIDGenImpl();
((SegmentIDGenImpl) idGen).setDao(dao);
if (idGen.init()) {
logger.info("Segment Service Init Successfully");
} else {
throw new InitException("Segment Service Init Fail");
}
} else {
idGen = new ZeroIDGen();
logger.info("Zero ID Gen Service Init Successfully");
}
}
public Result getId(String key) {
return idGen.get(key);
}
public SegmentIDGenImpl getIdGen() {
if (idGen instanceof SegmentIDGenImpl) {
return (SegmentIDGenImpl) idGen;
}
return null;
}
}

View File

@@ -1,44 +0,0 @@
package com.hanserwei.hannote.distributed.id.generator.biz.service;
import com.hanserwei.hannote.distributed.id.generator.biz.constant.Constants;
import com.hanserwei.hannote.distributed.id.generator.biz.core.IDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.PropertyFactory;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.Result;
import com.hanserwei.hannote.distributed.id.generator.biz.core.common.ZeroIDGen;
import com.hanserwei.hannote.distributed.id.generator.biz.core.snowflake.SnowflakeIDGenImpl;
import com.hanserwei.hannote.distributed.id.generator.biz.exception.InitException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.util.Properties;
@Service("SnowflakeService")
public class SnowflakeService {
private Logger logger = LoggerFactory.getLogger(SnowflakeService.class);
private IDGen idGen;
public SnowflakeService() throws InitException {
Properties properties = PropertyFactory.getProperties();
boolean flag = Boolean.parseBoolean(properties.getProperty(Constants.LEAF_SNOWFLAKE_ENABLE, "true"));
if (flag) {
String zkAddress = properties.getProperty(Constants.LEAF_SNOWFLAKE_ZK_ADDRESS);
int port = Integer.parseInt(properties.getProperty(Constants.LEAF_SNOWFLAKE_PORT));
idGen = new SnowflakeIDGenImpl(zkAddress, port);
if(idGen.init()) {
logger.info("Snowflake Service Init Successfully");
} else {
throw new InitException("Snowflake Service Init Fail");
}
} else {
idGen = new ZeroIDGen();
logger.info("Zero ID Gen Service Init Successfully");
}
}
public Result getId(String key) {
return idGen.get(key);
}
}

View File

@@ -1,5 +0,0 @@
spring:
cassandra:
keyspace-name: hannote
contact-points: 127.0.0.1
port: 9042

View File

@@ -1,6 +0,0 @@
server:
port: 8085 # 项目启动的端口
spring:
profiles:
active: dev # 默认激活 dev 本地开发环境

View File

@@ -1,12 +0,0 @@
leaf.name=com.sankuai.leaf.opensource.test
leaf.segment.enable=true
leaf.jdbc.url=jdbc:mysql://127.0.0.1:3306/leaf?useUnicode=true&characterEncoding=utf-8&autoReconnect=true&useSSL=false&serverTimezone=Asia/Shanghai
leaf.jdbc.username=root
leaf.jdbc.password=mysql
# Whether to enable snowflake mode
leaf.snowflake.enable=true
# ZooKeeper address used in snowflake mode
leaf.snowflake.zk.address=127.0.0.1:2181
# Port this service registers with ZooKeeper in snowflake mode
leaf.snowflake.port=2222

View File

@@ -15,7 +15,7 @@
<!-- 子模块管理 --> <!-- 子模块管理 -->
<modules> <modules>
<module>han-note-distributed-id-generator-api</module> <module>han-note-distributed-id-generator-api</module>
<module>hannote-distributed-id-generator-biz</module> <module>han-note-distributed-id-generator-biz</module>
</modules> </modules>
<artifactId>han-note-distributed-id-generator</artifactId> <artifactId>han-note-distributed-id-generator</artifactId>

24
pom.xml
View File

@@ -56,6 +56,10 @@
<cos-api.version>5.6.227</cos-api.version> <cos-api.version>5.6.227</cos-api.version>
<feign-form.version>3.8.0</feign-form.version> <feign-form.version>3.8.0</feign-form.version>
<caffeine.version>3.2.2</caffeine.version> <caffeine.version>3.2.2</caffeine.version>
<common-io.version>2.20.0</common-io.version>
<perf4j.version>0.9.16</perf4j.version>
<curator-recipes.version>5.9.0</curator-recipes.version>
<zookeeper.version>3.9.4</zookeeper.version>
</properties> </properties>
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
@@ -220,6 +224,26 @@
<artifactId>feign-form</artifactId> <artifactId>feign-form</artifactId>
<version>${feign-form.version}</version> <version>${feign-form.version}</version>
</dependency> </dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${common-io.version}</version>
</dependency>
<dependency>
<groupId>org.perf4j</groupId>
<artifactId>perf4j</artifactId>
<version>${perf4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator-recipes.version}</version>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>${zookeeper.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.hanserwei</groupId> <groupId>com.hanserwei</groupId>
<artifactId>han-note-user-api</artifactId> <artifactId>han-note-user-api</artifactId>