!1096 Snowflake增加方法:根据传入时间戳,计算ID起终点 (gitee/issues/I60M14)

* Snowflake add method:根据传入时间戳-计算ID起终点 (gitee/issues/I60M14)
This commit is contained in:
晚安,地球 2023-10-25 03:21:40 +00:00 committed by Looly
parent 7d80f1aa14
commit fe37c1aa84
2 changed files with 83 additions and 0 deletions

View File

@@ -198,6 +198,40 @@ public class Snowflake implements Serializable {
return (id >> TIMESTAMP_LEFT_SHIFT & ~(-1L << 41L)) + twepoch;
}
/**
 * Computes the ID range (start and end, inclusive) covered by the given timestamp window.
 * Data-center and worker bits are ignored, yielding a globally valid range across nodes.
 *
 * @param timestampStart start timestamp (milliseconds)
 * @param timestampEnd   end timestamp (milliseconds)
 * @return pair of (key = smallest ID, value = largest ID) for the window
 * @since 5.8.24
 */
public Pair<Long, Long> getIdScopeByTimestamp(long timestampStart, long timestampEnd) {
	// delegate to the full variant, ignoring data-center and worker placeholders
	return this.getIdScopeByTimestamp(timestampStart, timestampEnd, true);
}
/**
 * Computes the ID range (start and end, inclusive) covered by the given timestamp window.
 * See Gitee issue I60M14.
 *
 * @param timestampStart        start timestamp (milliseconds)
 * @param timestampEnd          end timestamp (milliseconds)
 * @param ignoreCenterAndWorker whether to ignore the data-center and worker bit positions;
 *                              when ignored, the range is globally valid across all nodes
 * @return pair of (key = smallest ID, value = largest ID) for the window
 * @since 5.8.24
 */
public Pair<Long, Long> getIdScopeByTimestamp(long timestampStart, long timestampEnd, boolean ignoreCenterAndWorker) {
	// smallest possible ID for each boundary millisecond: timestamp bits only
	final long startIdBase = (timestampStart - twepoch) << TIMESTAMP_LEFT_SHIFT;
	final long endIdBase = (timestampEnd - twepoch) << TIMESTAMP_LEFT_SHIFT;
	if (ignoreCenterAndWorker) {
		// fill every non-timestamp bit of the end ID with 1s to get the range maximum
		return Pair.of(startIdBase, endIdBase | ~(-1L << TIMESTAMP_LEFT_SHIFT));
	}
	// pin the range to this node's data-center and worker bits
	final long nodeBits = (dataCenterId << DATA_CENTER_ID_SHIFT) | (workerId << WORKER_ID_SHIFT);
	return Pair.of(startIdBase | nodeBits, endIdBase | nodeBits | SEQUENCE_MASK);
}
/**
* 下一个ID
*

View File

@@ -4,6 +4,7 @@ import cn.hutool.core.collection.ConcurrentHashSet;
import cn.hutool.core.exceptions.UtilException;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.core.util.StrUtil;
import org.junit.Assert;
import org.junit.Ignore;
@@ -19,6 +20,54 @@ import java.util.Set;
*/
public class SnowflakeTest {
/**
 * Test: compute the ID range (start/end) for a given timestamp window.
 */
@Test
public void snowflakeTestGetIdScope() {
	// random node identity within the 5-bit range
	final long workerId = RandomUtil.randomLong(31);
	final long dataCenterId = RandomUtil.randomLong(31);
	final Snowflake idWorker = new Snowflake(workerId, dataCenterId);
	final long generatedId = idWorker.nextId();
	// randomly decide whether to ignore the data-center and worker bit positions
	final boolean ignore = RandomUtil.randomBoolean();

	final long createTimestamp = idWorker.getGenerateDateTime(generatedId);
	final Pair<Long, Long> idScope = idWorker.getIdScopeByTimestamp(createTimestamp, createTimestamp, ignore);
	final long startId = idScope.getKey();
	final long endId = idScope.getValue();
	System.out.println(longToBinaryReadable(generatedId) + " = generatedId longToBinaryReadable");
	System.out.println(longToBinaryReadable(startId) + " = startId longToBinaryReadable");
	System.out.println(longToBinaryReadable(endId) + " = endId longToBinaryReadable");

	// difference between the range start and end
	final long trueOffSet = endId - startId;
	// when data-center/worker bits are ignored the gap is 22 one-bits, otherwise 12
	final long expectedOffSet = ignore ? ~(-1 << 22) : ~(-1 << 12);
	System.out.println("计算差值 = " + trueOffSet + ", 预期差值 = " + expectedOffSet);
	// fix: JUnit's assertEquals takes (expected, actual) — the original had them reversed,
	// which makes failure messages report the values backwards
	Assert.assertEquals(expectedOffSet, trueOffSet);
}
/**
 * Renders a long as a snowflake-layout binary string.
 *
 * @param number the long value
 * @return 64-bit binary string grouped as: sign (1 bit) - timestamp delta (41 bits)
 *         - data-center id (5 bits) - worker id (5 bits) - sequence (12 bits)
 */
private static String longToBinaryReadable(long number) {
	// fix: zero-pad to 64 bits in one step instead of the original O(n^2)
	// char-by-char insert loop; also static, as no instance state is used
	final String bits = String.format("%64s", Long.toBinaryString(number)).replace(' ', '0');
	// insert separators from right to left so earlier inserts don't shift later indices
	return new StringBuilder(bits)
			.insert(52, '-')
			.insert(47, '-')
			.insert(42, '-')
			.insert(1, '-')
			.toString();
}
@Test
public void snowflakeTest1(){
//构建Snowflake提供终端ID和数据中心ID