refactor(utils): replace the snowflake ID generation utility implementation

- Remove the custom snowflake ID generation logic
- Introduce Hutool's Snowflake utility class
- Simplify the ID generation methods and improve code maintainability
- Remove the related test class file
- Delete the no-longer-used UniqueId and UniqueIdMetaData model classes
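For reviewers unfamiliar with Hutool, here is a minimal sketch of the API this commit adopts (an illustration only, assuming Hutool 5.x on the classpath; `IdUtil.getSnowflake` is Hutool's convenience factory and is not used verbatim in the changed code):

```java
import cn.hutool.core.lang.Snowflake;
import cn.hutool.core.util.IdUtil;

public class HutoolSnowflakeDemo {
    public static void main(String[] args) {
        // Obtain a Snowflake generator with workerId=1 and datacenterId=1,
        // the same IDs the refactored SnowFlakeUtil uses below.
        Snowflake snowflake = IdUtil.getSnowflake(1, 1);

        long numericId = snowflake.nextId();      // 64-bit snowflake ID
        String stringId = snowflake.nextIdStr();  // the same kind of ID, rendered as a String

        System.out.println(numericId);
        System.out.println(stringId);
    }
}
```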
UniqueId.java (com.ycwl.basic.model.snowFlake), file deleted:
@@ -1,50 +0,0 @@
package com.ycwl.basic.model.snowFlake;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.io.Serial;
import java.io.Serializable;

/**
 * @author Created by liuhongguang on 2019-10-27
 * @Description
 */
@NoArgsConstructor
@AllArgsConstructor
@Data
public class UniqueId implements Serializable {

    /**
     * 0 + 41 + 5 + 5 + 12
     * fixed sign bit + timestamp + worker machine ID + datacenter ID + sequence
     */

    @Serial
    private static final long serialVersionUID = 8632670752020316524L;

    /**
     * Worker machine ID, datacenter ID, in-millisecond sequence, and the timestamp of the last generated ID
     */
    // machine ID
    private long machineId;

    // datacenter ID
    private long datacenterId;

    // sequence within the millisecond
    private long sequence;

    // timestamp
    private long timestamp;

    @Override
    public String toString() {
        return "UniqueIdRespVo{" +
                "machineId=" + machineId +
                ", datacenterId=" + datacenterId +
                ", sequenceInMs=" + sequence +
                ", timestampOffsetFromEpoch=" + timestamp +
                '}';
    }
}
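The deleted model class only carried the four decoded fields of the 41 + 5 + 5 + 12 bit layout described in its Javadoc. As a worked illustration of that layout (a sketch using the same shift widths as the removed code; the class and variable names here are hypothetical), composing and decomposing an ID is plain bit arithmetic:

```java
public class SnowflakeLayoutDemo {
    public static void main(String[] args) {
        // Example field values (all within their bit ranges).
        long timestampOffset = 123_456_789L; // milliseconds since the custom epoch (41 bits)
        long datacenterId = 3L;              // 5 bits
        long machineId = 7L;                 // 5 bits
        long sequence = 42L;                 // 12 bits

        // Compose: timestamp | datacenter | machine | sequence, from high bits to low bits.
        long id = (timestampOffset << 22) | (datacenterId << 17) | (machineId << 12) | sequence;

        // Decompose with the matching shifts and masks (0x1F = 31, 0xFFF = 4095).
        System.out.println((id >>> 22) == timestampOffset);       // true
        System.out.println(((id >>> 17) & 0x1F) == datacenterId); // true
        System.out.println(((id >>> 12) & 0x1F) == machineId);    // true
        System.out.println((id & 0xFFF) == sequence);             // true
    }
}
```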
UniqueIdMetaData.java (com.ycwl.basic.model.snowFlake), file deleted:
@@ -1,84 +0,0 @@
package com.ycwl.basic.model.snowFlake;

public class UniqueIdMetaData {

    /**
     * Reference start time for the timestamp part
     * (the commented-out variant used the current system time);
     * 1995-04-01 is taken as the reference date.
     */
    // public static final long START_TIME = LocalDateTime.now().toInstant(ZoneOffset.UTC).toEpochMilli();
    public static final long START_TIME = 796665600000L;

    /**
     * Number of bits used for the machine ID
     */
    public static final long MACHINE_ID_BITS = 5L;

    /**
     * Maximum machine ID: 31 (range 0-31)
     */
    public static final long MAX_MACHINE_ID = ~(-1L << MACHINE_ID_BITS);

    /**
     * Number of bits used for the datacenter ID
     */
    public static final long DATACENTER_ID_BITS = 5L;

    /**
     * Maximum datacenter ID: 31 (range 0-31)
     */
    public static final long MAX_DATACENTER_ID = ~(-1L << MACHINE_ID_BITS);

    /**
     * Number of bits used for the sequence
     */
    public static final long SEQUENCE_BITS = 12L;

    /**
     * Machine ID offset: 12
     */
    public static final long MACHINE_SHIFT_BITS = SEQUENCE_BITS;

    /**
     * Datacenter ID offset: 12 + 5 = 17
     */
    public static final long DATACENTER_SHIFT_BITS = SEQUENCE_BITS + MACHINE_ID_BITS;

    /**
     * Timestamp offset: 12 + 5 + 5 = 22
     */
    public static final long TIMESTAMP_LEFT_SHIFT_BITS = SEQUENCE_BITS + MACHINE_ID_BITS + DATACENTER_ID_BITS;

    /**
     * Sequence mask: 4095
     */
    public static final long SEQUENCE_MASK = ~(-1L << SEQUENCE_BITS);

    /**
     * Machine ID mask: 31
     */
    public static final long MACHINE_MASK = ~(-1L << MACHINE_ID_BITS);

    /**
     * Datacenter mask: 31
     */
    public static final long DATACENTER_MASK = ~(-1L << MACHINE_ID_BITS);

    /**
     * Timestamp mask: 2^41 - 1
     */
    public static final long TIMESTAMP_MASK = ~(-1L << 41L);

}
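A quick sanity check of what the formula-style constants above evaluate to (illustrative only, not part of the commit). One quirk worth noting: MAX_DATACENTER_ID and DATACENTER_MASK are derived from MACHINE_ID_BITS rather than DATACENTER_ID_BITS, which is harmless here only because both widths are 5 bits.

```java
public class SnowflakeConstantsCheck {
    public static void main(String[] args) {
        // ~(-1L << n) produces an n-bit mask: the lowest n bits set to 1.
        System.out.println(~(-1L << 5));   // 31            (MAX_MACHINE_ID, MACHINE_MASK, DATACENTER_MASK)
        System.out.println(~(-1L << 12));  // 4095          (SEQUENCE_MASK)
        System.out.println(~(-1L << 41));  // 2199023255551 (TIMESTAMP_MASK, i.e. 2^41 - 1)

        // Field offsets inside the 64-bit ID.
        long sequenceBits = 12L, machineIdBits = 5L, datacenterIdBits = 5L;
        System.out.println(sequenceBits);                                    // 12 (MACHINE_SHIFT_BITS)
        System.out.println(sequenceBits + machineIdBits);                    // 17 (DATACENTER_SHIFT_BITS)
        System.out.println(sequenceBits + machineIdBits + datacenterIdBits); // 22 (TIMESTAMP_LEFT_SHIFT_BITS)
    }
}
```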
SnowFlakeUtil.java (com.ycwl.basic.utils), file modified:
@@ -1,138 +1,28 @@
 package com.ycwl.basic.utils;
 
-import com.ycwl.basic.model.snowFlake.UniqueId;
-import com.ycwl.basic.model.snowFlake.UniqueIdMetaData;
+import cn.hutool.core.lang.Snowflake;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;
 
-import java.util.Date;
 
 /**
  * @author Created by liuhongguang
  * @Description
  */
 @Slf4j
 @Component
 public class SnowFlakeUtil {
 
-    /**
-     * Records the timestamp (millisecond) of the last generated ID
-     */
-    private static long lastTimestamp = -1L;
+    private static final Long machineId = 1L;
 
-    /**
-     * Records the sequence within the millisecond, 0-4095
-     */
-    private static long sequence = 0L;
+    private static final Long datacenterId = 1L;
 
-    private static Long machineId = 1L;
+    private static final Snowflake snowflake = new Snowflake(null, machineId, datacenterId, true);
 
-    private static Long datacenterId = 1L;
-
-    public static synchronized String getId() {
-        long timestamp = System.currentTimeMillis();
-
-        // If the current time is earlier than the timestamp of the last generated ID, the system clock
-        // has been moved backwards; refuse to generate IDs and throw an exception.
-        if (timestamp < lastTimestamp) {
-            throw new IllegalStateException(
-                    String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
+    public static String getId() {
+        return snowflake.nextIdStr();
     }
 
-        // Generated within the same millisecond: advance the in-millisecond sequence.
-        if (lastTimestamp == timestamp) {
-            sequence = (sequence + 1) & UniqueIdMetaData.SEQUENCE_MASK;
-            // The in-millisecond sequence overflowed:
-            if (sequence == 0) {
-                // spin until the next millisecond to obtain a new timestamp
-                timestamp = System.currentTimeMillis();
-                while (timestamp <= lastTimestamp) {
-                    timestamp = System.currentTimeMillis();
-                }
-                return String.valueOf(timestamp);
-            }
-        }
-        // The timestamp changed: reset the in-millisecond sequence.
-        else {
-            sequence = 0L;
-        }
-
-        // Timestamp of the last generated ID
-        lastTimestamp = timestamp;
-
-        // Shift the parts into place and OR them together into a 64-bit ID.
-        return String.valueOf(((timestamp - UniqueIdMetaData.START_TIME) << UniqueIdMetaData.TIMESTAMP_LEFT_SHIFT_BITS)
-                | (datacenterId << UniqueIdMetaData.DATACENTER_SHIFT_BITS)
-                | (machineId << UniqueIdMetaData.MACHINE_SHIFT_BITS)
-                | sequence);
-    }
     public static Long getLongId(){
-        return Long.valueOf(getId());
+        return snowflake.nextId();
    }
 
-    public UniqueId explainId(long id) {
-        UniqueId uniqueId = com.ycwl.basic.utils.SnowFlakeUtil.convert(id);
-        if (uniqueId == null) {
-            log.error("==> Failed to parse the ID: the ID is not valid");
-            return null;
-        }
-        return uniqueId;
-    }
-
-    public Date transTime(long time) {
-        return new Date(time + UniqueIdMetaData.START_TIME);
-    }
-
-    /**
-     * Parses a UniqueId object back into its numeric ID.
-     *
-     * @param uniqueId
-     * @return
-     */
-    public static long convert(UniqueId uniqueId) {
-        long result = 0;
-        try {
-            result |= uniqueId.getSequence();
-            result |= uniqueId.getMachineId() << UniqueIdMetaData.MACHINE_SHIFT_BITS;
-            result |= uniqueId.getDatacenterId() << UniqueIdMetaData.DATACENTER_SHIFT_BITS;
-            result |= uniqueId.getTimestamp() << UniqueIdMetaData.TIMESTAMP_LEFT_SHIFT_BITS;
-        } catch (Exception e) {
-            e.printStackTrace();
-            return result;
-        }
-        return result;
-    }
-
-    public static UniqueId convert(long id) {
-        UniqueId uniqueId = null;
-        try {
-            uniqueId = new UniqueId();
-            uniqueId.setSequence(id & UniqueIdMetaData.SEQUENCE_MASK);
-            uniqueId.setMachineId((id >>> UniqueIdMetaData.MACHINE_SHIFT_BITS) & UniqueIdMetaData.MACHINE_MASK);
-            uniqueId.setDatacenterId((id >>> UniqueIdMetaData.DATACENTER_SHIFT_BITS) & UniqueIdMetaData.DATACENTER_MASK);
-            uniqueId.setTimestamp((id >>> UniqueIdMetaData.TIMESTAMP_LEFT_SHIFT_BITS) & UniqueIdMetaData.TIMESTAMP_MASK);
-        } catch (Exception e) {
-            e.printStackTrace();
-            return uniqueId;
-        }
-        return uniqueId;
-    }
 }
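The public entry points SnowFlakeUtil.getId() and getLongId() keep their signatures, so callers need no changes, and the removed clock-rollback check is performed inside Hutool's Snowflake, which throws if the system clock moves backwards beyond its tolerance. For the deleted explainId/transTime/convert helpers, Hutool exposes rough equivalents; here is a hedged sketch (assuming the Hutool 5.x API; the wrapper class below is hypothetical and not part of this commit):

```java
import java.util.Date;

import cn.hutool.core.lang.Snowflake;

public class SnowflakeDecodeDemo {
    public static void main(String[] args) {
        // Same construction as the refactored SnowFlakeUtil: workerId=1, datacenterId=1, system clock.
        Snowflake snowflake = new Snowflake(null, 1L, 1L, true);

        long id = snowflake.nextId();

        // Rough replacements for the removed explainId()/convert()/transTime() helpers.
        long workerId = snowflake.getWorkerId(id);
        long dataCenterId = snowflake.getDataCenterId(id);
        Date generatedAt = new Date(snowflake.getGenerateDateTime(id));

        System.out.printf("id=%d worker=%d datacenter=%d generatedAt=%s%n",
                id, workerId, dataCenterId, generatedAt);
    }
}
```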
SnowFlakeUtilTest.java (com.ycwl.basic.utils), test file deleted:
@@ -1,92 +0,0 @@
package com.ycwl.basic.utils;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Test for SnowFlakeUtil to verify uniqueness under high concurrency.
 */
public class SnowFlakeUtilTest {

    private static final Logger log = LoggerFactory.getLogger(SnowFlakeUtilTest.class);

    @Test
    public void testIdUniqueness() throws InterruptedException {
        // Number of threads
        int threadCount = 1000;
        // Number of IDs per thread
        int idCountPerThread = 10000;
        // Total IDs expected
        int totalIdCount = threadCount * idCountPerThread;

        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        final Set<String> idSet = ConcurrentHashMap.newKeySet();
        final CountDownLatch latch = new CountDownLatch(threadCount);

        // Use a set to capture any "suspicious" short IDs (potential raw timestamps)
        final Set<String> suspiciousIds = ConcurrentHashMap.newKeySet();

        log.info("Starting concurrent snowflake ID generation test...");
        long start = System.currentTimeMillis();

        for (int i = 0; i < threadCount; i++) {
            executorService.execute(() -> {
                try {
                    for (int j = 0; j < idCountPerThread; j++) {
                        String id = SnowFlakeUtil.getId();
                        idSet.add(id);

                        // Check for the potential overflow bug where it returns a raw timestamp.
                        // A valid snowflake ID (around year 2025) should be much larger than a timestamp:
                        // the current timestamp is approx 13 digits (1734...),
                        // while a snowflake ID should be approx 18-19 digits.
                        if (id.length() < 15) {
                            suspiciousIds.add(id);
                        }
                    }
                } catch (Exception e) {
                    log.error("Error generating ID", e);
                } finally {
                    latch.countDown();
                }
            });
        }

        latch.await();
        long end = System.currentTimeMillis();
        executorService.shutdown();

        log.info("Generated {} IDs in {} ms", totalIdCount, (end - start));

        if (!suspiciousIds.isEmpty()) {
            log.warn("Found {} suspicious IDs (likely raw timestamps due to sequence overflow): {}", suspiciousIds.size(), suspiciousIds);
            // We might not fail the test for this if the user only asked for uniqueness,
            // but it's good to report.
            // However, if they are raw timestamps, they collide heavily if generated in the same ms.
        }

        Assertions.assertEquals(totalIdCount, idSet.size(), "Duplicate IDs found!");
        log.info("Uniqueness test passed. Total unique IDs: {}", idSet.size());
    }

    @Test
    public void testPerformanceSingleThread() {
        long start = System.currentTimeMillis();
        int count = 100000;
        for (int i = 0; i < count; i++) {
            SnowFlakeUtil.getId();
        }
        long end = System.currentTimeMillis();
        log.info("Generated {} IDs in {} ms (Single Thread)", count, (end - start));
    }
}
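The commit drops the test above without a replacement. If uniqueness coverage is still wanted, a trimmed JUnit 5 sketch against the Hutool-backed SnowFlakeUtil could look like this (not part of the commit; the class name is hypothetical, and the raw-timestamp check no longer applies, so it is omitted):

```java
package com.ycwl.basic.utils;

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

public class SnowFlakeUtilHutoolTest {

    @Test
    public void idsAreUniqueUnderConcurrency() throws InterruptedException {
        int threadCount = 16;
        int idsPerThread = 10_000;

        ExecutorService pool = Executors.newFixedThreadPool(threadCount);
        Set<String> ids = ConcurrentHashMap.newKeySet();
        CountDownLatch latch = new CountDownLatch(threadCount);

        for (int i = 0; i < threadCount; i++) {
            pool.execute(() -> {
                try {
                    for (int j = 0; j < idsPerThread; j++) {
                        ids.add(SnowFlakeUtil.getId());
                    }
                } finally {
                    latch.countDown();
                }
            });
        }

        latch.await();
        pool.shutdown();

        // Every generated ID must be distinct.
        Assertions.assertEquals(threadCount * idsPerThread, ids.size(), "Duplicate IDs found");
    }
}
```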