package org.unify4j.common;

import org.unify4j.model.base.ConcurrentHashMapNullSafe;

import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

/**
 * A cache that holds items for a specified Time-To-Live (TTL) duration.
 * Optionally, it supports Least Recently Used (LRU) eviction when a maximum size is specified.
 * This implementation uses sentinel values (via {@link ConcurrentHashMapNullSafe}) to support
 * null keys and values. A single shared background thread purges expired entries for all
 * cache instances; each instance is held only weakly by its purge task so it can be
 * garbage collected independently.
 *
 * @param <K> the type of keys maintained by this cache
 * @param <V> the type of mapped values
 */
public class SoftCache4j<K, V> implements Map<K, V> {

    private final long ttlMillis;
    private final int maxSize;
    private final ConcurrentMap<K, CacheEntry<K, V>> cacheMap;
    // Guards the LRU doubly-linked list only; cacheMap is independently thread-safe.
    private final ReentrantLock lock = new ReentrantLock();
    private final Node<K, V> head;
    private final Node<K, V> tail;

    // Daemon thread so the shared scheduler never prevents JVM shutdown,
    // even if shutdown() is not called explicitly.
    private static final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
        Thread t = new Thread(r, "SoftCache4j-purge");
        t.setDaemon(true);
        return t;
    });

    /**
     * Constructs a cache with the specified TTL.
     * When constructed this way, there is no LRU size limitation, and the default
     * cleanup interval is 60 seconds.
     *
     * @param ttlMillis the time-to-live in milliseconds for each cache entry
     */
    public SoftCache4j(long ttlMillis) {
        this(ttlMillis, -1, 60000);
    }

    /**
     * Constructs a cache with the specified TTL and maximum size.
     * When constructed this way, the default cleanup interval is 60 seconds.
     *
     * @param ttlMillis the time-to-live in milliseconds for each cache entry
     * @param maxSize   the maximum number of entries in the cache (-1 for unlimited)
     */
    public SoftCache4j(long ttlMillis, int maxSize) {
        this(ttlMillis, maxSize, 60000);
    }

    /**
     * Constructs a cache with the specified TTL, maximum size, and cleanup interval.
     *
     * @param ttlMillis             the time-to-live in milliseconds for each cache entry
     * @param maxSize               the maximum number of entries in the cache (-1 for unlimited)
     * @param cleanupIntervalMillis the cleanup interval in milliseconds for purging expired entries
     * @throws IllegalArgumentException if {@code ttlMillis < 1} or {@code cleanupIntervalMillis < 10}
     */
    public SoftCache4j(long ttlMillis, int maxSize, long cleanupIntervalMillis) {
        if (ttlMillis < 1) {
            throw new IllegalArgumentException("TTL must be at least 1 millisecond.");
        }
        if (cleanupIntervalMillis < 10) {
            throw new IllegalArgumentException("cleanupIntervalMillis must be at least 10 milliseconds.");
        }
        this.ttlMillis = ttlMillis;
        this.maxSize = maxSize;
        this.cacheMap = new ConcurrentHashMapNullSafe<>();

        // Sentinel head/tail nodes simplify the doubly-linked list used for LRU tracking.
        this.head = new Node<>(null, null);
        this.tail = new Node<>(null, null);
        head.next = tail;
        tail.prev = head;

        // Schedule the recurring purge task for this cache instance.
        schedulePurgeTask(cleanupIntervalMillis);
    }

    /**
     * Associates {@code value} with {@code key}, resetting the entry's TTL.
     *
     * @return the previous value associated with {@code key}, or null if none
     */
    @Override
    public V put(K key, V value) {
        long expiryTime = System.currentTimeMillis() + ttlMillis;
        Node<K, V> node = new Node<>(key, value);
        CacheEntry<K, V> newEntry = new CacheEntry<>(node, expiryTime);
        CacheEntry<K, V> oldEntry = cacheMap.put(key, newEntry);
        // Capture the old value BEFORE unlinking: unlink() nulls node.value.
        V oldValue = (oldEntry != null) ? oldEntry.node.value : null;

        boolean acquired = lock.tryLock();
        try {
            if (acquired) {
                // Bug fix: unlink the node that was replaced in cacheMap, otherwise
                // every overwritten key leaks a stale node in the LRU list.
                if (oldEntry != null && oldEntry.node.prev != null) {
                    unlink(oldEntry.node);
                }
                insertAtTail(node);

                if (maxSize > -1 && cacheMap.size() > maxSize) {
                    // Evict the least recently used entry (the node after head).
                    Node<K, V> lruNode = head.next;
                    if (lruNode != tail) {
                        removeEntry(lruNode.key);
                    }
                }
            }
            // If lock not acquired, skip LRU update for performance.
        } finally {
            if (acquired) {
                lock.unlock();
            }
        }

        return oldValue;
    }

    /**
     * Returns the value for {@code key}, or null if absent or expired.
     * Expired entries are removed eagerly; a hit moves the entry to the MRU position.
     */
    @SuppressWarnings({"unchecked"})
    @Override
    public V get(Object key) {
        CacheEntry<K, V> entry = cacheMap.get(key);
        if (entry == null) {
            return null;
        }

        long currentTime = System.currentTimeMillis();
        if (entry.expiryTime < currentTime) {
            removeEntry((K) key);
            return null;
        }

        V value = entry.node.value;

        boolean acquired = lock.tryLock();
        try {
            if (acquired) {
                moveToTail(entry.node);
            }
            // If lock not acquired, skip LRU update for performance.
        } finally {
            if (acquired) {
                lock.unlock();
            }
        }

        return value;
    }

    /**
     * Removes the entry for {@code key}.
     *
     * @return the removed value, or null if no entry existed
     */
    @Override
    public V remove(Object key) {
        CacheEntry<K, V> entry = cacheMap.remove(key);
        if (entry != null) {
            V value = entry.node.value;
            lock.lock();
            try {
                unlink(entry.node);
            } finally {
                lock.unlock();
            }
            return value;
        }
        return null;
    }

    /** Removes all entries and resets the LRU list to empty. */
    @Override
    public void clear() {
        cacheMap.clear();
        lock.lock();
        try {
            // Reset the linked list to just the sentinels.
            head.next = tail;
            tail.prev = head;
        } finally {
            lock.unlock();
        }
    }

    @Override
    public int size() {
        return cacheMap.size();
    }

    @Override
    public boolean isEmpty() {
        return cacheMap.isEmpty();
    }

    /**
     * Returns true if a live (non-expired) entry exists for {@code key};
     * expired entries are removed as a side effect.
     */
    @SuppressWarnings({"unchecked"})
    @Override
    public boolean containsKey(Object key) {
        CacheEntry<K, V> entry = cacheMap.get(key);
        if (entry == null) {
            return false;
        }
        if (entry.expiryTime < System.currentTimeMillis()) {
            removeEntry((K) key);
            return false;
        }
        return true;
    }

    /** Linear scan over current entries; does not check expiry. */
    @Override
    public boolean containsValue(Object value) {
        for (CacheEntry<K, V> entry : cacheMap.values()) {
            Object entryValue = entry.node.value;
            if (Objects.equals(entryValue, value)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> m) {
        for (Entry<? extends K, ? extends V> e : m.entrySet()) {
            put(e.getKey(), e.getValue());
        }
    }

    /** Returns a snapshot of the current keys (not a live view). */
    @SuppressWarnings({"NullableProblems"})
    @Override
    public Set<K> keySet() {
        Set<K> keys = new HashSet<>();
        for (CacheEntry<K, V> entry : cacheMap.values()) {
            keys.add(entry.node.key);
        }
        return keys;
    }

    /** Returns a snapshot of the current values (not a live view). */
    @SuppressWarnings({"NullableProblems"})
    @Override
    public Collection<V> values() {
        List<V> values = new ArrayList<>();
        for (CacheEntry<K, V> entry : cacheMap.values()) {
            values.add(entry.node.value);
        }
        return values;
    }

    @SuppressWarnings({"NullableProblems"})
    @Override
    public Set<Entry<K, V>> entrySet() {
        return new EntrySet();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Map)) return false; // covers null check too
        Map<?, ?> other = (Map<?, ?>) o;
        lock.lock();
        try {
            return entrySet().equals(other.entrySet());
        } finally {
            lock.unlock();
        }
    }

    @Override
    public int hashCode() {
        lock.lock();
        try {
            int hashCode = 1;
            for (Node<K, V> node = head.next; node != tail; node = node.next) {
                Object key = node.key;
                Object value = node.value;
                hashCode = 31 * hashCode + (key == null ? 0 : key.hashCode());
                hashCode = 31 * hashCode + (value == null ? 0 : value.hashCode());
            }
            return hashCode;
        } finally {
            lock.unlock();
        }
    }

    @Override
    public String toString() {
        lock.lock();
        try {
            StringBuilder sb = new StringBuilder();
            sb.append('{');
            Iterator<Entry<K, V>> it = entrySet().iterator();
            while (it.hasNext()) {
                Entry<K, V> entry = it.next();
                sb.append(entry.getKey()).append('=').append(entry.getValue());
                if (it.hasNext()) {
                    sb.append(", ");
                }
            }
            sb.append('}');
            return sb.toString();
        } finally {
            lock.unlock();
        }
    }

    /**
     * Shuts down the shared scheduler. Call this method when your application is terminating.
     * This stops the background task that purges expired entries for ALL cache instances.
     */
    public static void shutdown() {
        scheduler.shutdown();
    }

    /**
     * Schedules the recurring purge task for this cache. The task holds only a
     * {@link WeakReference} to the cache, and retains its {@link ScheduledFuture}
     * so it can cancel itself once the cache has been garbage collected.
     *
     * @param cleanupIntervalMillis the cleanup interval in milliseconds
     */
    private void schedulePurgeTask(long cleanupIntervalMillis) {
        WeakReference<SoftCache4j<?, ?>> cacheRef = new WeakReference<>(this);
        PurgeTask purgeTask = new PurgeTask(cacheRef);
        ScheduledFuture<?> future = scheduler.scheduleAtFixedRate(purgeTask, cleanupIntervalMillis, cleanupIntervalMillis, TimeUnit.MILLISECONDS);
        purgeTask.setFuture(future);
    }

    /**
     * Recurring task that purges expired entries of one cache instance.
     * Cancels its own scheduled future once the cache has been garbage collected,
     * so the scheduler does not keep firing a dead task forever.
     */
    private static class PurgeTask implements Runnable {
        private final WeakReference<SoftCache4j<?, ?>> cacheRef;
        private volatile ScheduledFuture<?> future;

        PurgeTask(WeakReference<SoftCache4j<?, ?>> cacheRef) {
            this.cacheRef = cacheRef;
        }

        /** Wires in the future controlling this task; called right after scheduling. */
        void setFuture(ScheduledFuture<?> future) {
            this.future = future;
        }

        @Override
        public void run() {
            SoftCache4j<?, ?> cache = cacheRef.get();
            if (cache == null) {
                // Cache has been garbage collected; cancel this recurring task
                // so the scheduler drops its strong reference to it.
                ScheduledFuture<?> f = future;
                if (f != null) {
                    f.cancel(false);
                }
            } else {
                cache.purgeExpiredEntries();
            }
        }
    }

    /** Node in the LRU doubly-linked list. */
    private static class Node<K, V> {
        final K key;
        V value;
        Node<K, V> prev;
        Node<K, V> next;

        Node(K key, V value) {
            this.key = key;
            this.value = value;
        }
    }

    /** Cache entry pairing a list node with its absolute expiration time. */
    private static class CacheEntry<K, V> {
        final Node<K, V> node;
        final long expiryTime;

        CacheEntry(Node<K, V> node, long expiryTime) {
            this.node = node;
            this.expiryTime = expiryTime;
        }
    }

    /**
     * Purges expired entries from this cache, removing each from both the map
     * and the LRU list.
     */
    private void purgeExpiredEntries() {
        long currentTime = System.currentTimeMillis();
        for (Iterator<Map.Entry<K, CacheEntry<K, V>>> it = cacheMap.entrySet().iterator(); it.hasNext(); ) {
            Map.Entry<K, CacheEntry<K, V>> entry = it.next();
            if (entry.getValue().expiryTime < currentTime) {
                it.remove();
                lock.lock();
                try {
                    unlink(entry.getValue().node);
                } finally {
                    lock.unlock();
                }
            }
        }
    }

    /**
     * Removes an entry from the cache and unlinks its node from the LRU list.
     *
     * @param cacheKey the cache key to remove
     */
    private void removeEntry(K cacheKey) {
        CacheEntry<K, V> entry = cacheMap.remove(cacheKey);
        if (entry != null) {
            lock.lock();
            try {
                unlink(entry.node);
            } finally {
                lock.unlock();
            }
        }
    }

    /**
     * Unlinks a node from the doubly-linked list and clears its references
     * to aid garbage collection. Caller must hold {@code lock}.
     *
     * @param node the node to unlink
     */
    private void unlink(Node<K, V> node) {
        node.prev.next = node.next;
        node.next.prev = node.prev;
        node.prev = null;
        node.next = null;
        node.value = null;
    }

    /**
     * Moves a node to the tail (most recently used position).
     * Caller must hold {@code lock}.
     *
     * @param node the node to move
     */
    private void moveToTail(Node<K, V> node) {
        // Unlink the node from its current position.
        node.prev.next = node.next;
        node.next.prev = node.prev;

        // Re-insert just before the tail sentinel.
        node.prev = tail.prev;
        node.next = tail;
        tail.prev.next = node;
        tail.prev = node;
    }

    /**
     * Inserts a node at the tail of the list (most recently used position).
     * Caller must hold {@code lock}.
     *
     * @param node the node to insert
     */
    private void insertAtTail(Node<K, V> node) {
        node.prev = tail.prev;
        node.next = tail;
        tail.prev.next = node;
        tail.prev = node;
    }

    /**
     * Custom EntrySet implementation that supports iterator removal.
     */
    private class EntrySet extends AbstractSet<Entry<K, V>> {

        @SuppressWarnings({"NullableProblems"})
        @Override
        public Iterator<Entry<K, V>> iterator() {
            return new EntryIterator();
        }

        @Override
        public int size() {
            return SoftCache4j.this.size();
        }

        @Override
        public void clear() {
            SoftCache4j.this.clear();
        }
    }

    /**
     * Iterator over the EntrySet; removal delegates to {@link #removeEntry}
     * so the LRU list stays consistent.
     */
    private class EntryIterator implements Iterator<Entry<K, V>> {
        private final Iterator<Entry<K, CacheEntry<K, V>>> iterator;
        private Entry<K, CacheEntry<K, V>> current;

        public EntryIterator() {
            this.iterator = cacheMap.entrySet().iterator();
        }

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public Entry<K, V> next() {
            current = iterator.next();
            K key = current.getValue().node.key;
            V value = current.getValue().node.value;
            return new AbstractMap.SimpleEntry<>(key, value);
        }

        @Override
        public void remove() {
            if (current == null) {
                throw new IllegalStateException();
            }
            removeEntry(current.getKey());
            current = null;
        }
    }
}
package org.unify4j.model.base;

import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.function.BiFunction;
import java.util.function.Function;

/**
 * AbstractConcurrentMapNullSafe is an abstract class that provides a thread-safe implementation
 * of ConcurrentMap and Map interfaces, allowing null keys and null values by masking them with
 * sentinel objects before they reach the (null-hostile) backing map.
 *
 * @param <K> The type of keys maintained by this map
 * @param <V> The type of mapped values
 */
public abstract class AbstractConcurrentMapNullSafe<K, V> implements ConcurrentMap<K, V> {
    /** Sentinels stored in the backing map in place of null keys/values. */
    protected enum NullSentinel {
        NULL_KEY, NULL_VALUE
    }

    // Internal ConcurrentMap storing masked Objects; never sees a real null.
    protected final ConcurrentMap<Object, Object> internalMap;

    /**
     * Constructs a new AbstractConcurrentMapNullSafe with the provided internal map.
     *
     * @param internalMap the internal ConcurrentMap to use
     */
    protected AbstractConcurrentMapNullSafe(ConcurrentMap<Object, Object> internalMap) {
        this.internalMap = internalMap;
    }

    /** Replaces a null key with the NULL_KEY sentinel. */
    protected Object maskNullKey(K key) {
        return key == null ? NullSentinel.NULL_KEY : key;
    }

    /** Restores a NULL_KEY sentinel back to null. */
    @SuppressWarnings("unchecked")
    protected K unmaskNullKey(Object key) {
        return key == NullSentinel.NULL_KEY ? null : (K) key;
    }

    /** Replaces a null value with the NULL_VALUE sentinel. */
    protected Object maskNullValue(V value) {
        return value == null ? NullSentinel.NULL_VALUE : value;
    }

    /** Restores a NULL_VALUE sentinel back to null. */
    @SuppressWarnings("unchecked")
    protected V unmaskNullValue(Object value) {
        return value == NullSentinel.NULL_VALUE ? null : (V) value;
    }

    @Override
    public int size() {
        return internalMap.size();
    }

    @Override
    public boolean isEmpty() {
        return internalMap.isEmpty();
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean containsKey(Object key) {
        return internalMap.containsKey(maskNullKey((K) key));
    }

    @Override
    public boolean containsValue(Object value) {
        return value == null ? internalMap.containsValue(NullSentinel.NULL_VALUE) : internalMap.containsValue(value);
    }

    @SuppressWarnings("unchecked")
    @Override
    public V get(Object key) {
        Object val = internalMap.get(maskNullKey((K) key));
        return unmaskNullValue(val);
    }

    @Override
    public V put(K key, V value) {
        Object prev = internalMap.put(maskNullKey(key), maskNullValue(value));
        return unmaskNullValue(prev);
    }

    @SuppressWarnings("unchecked")
    @Override
    public V remove(Object key) {
        Object prev = internalMap.remove(maskNullKey((K) key));
        return unmaskNullValue(prev);
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> m) {
        for (Entry<? extends K, ? extends V> entry : m.entrySet()) {
            internalMap.put(maskNullKey(entry.getKey()), maskNullValue(entry.getValue()));
        }
    }

    @Override
    public void clear() {
        internalMap.clear();
    }

    @SuppressWarnings("unchecked")
    @Override
    public V getOrDefault(Object key, V defaultValue) {
        Object val = internalMap.get(maskNullKey((K) key));
        return (val != null) ? unmaskNullValue(val) : defaultValue;
    }

    @Override
    public V putIfAbsent(K key, V value) {
        Object prev = internalMap.putIfAbsent(maskNullKey(key), maskNullValue(value));
        return unmaskNullValue(prev);
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean remove(Object key, Object value) {
        return internalMap.remove(maskNullKey((K) key), maskNullValue((V) value));
    }

    @Override
    public boolean replace(K key, V oldValue, V newValue) {
        return internalMap.replace(maskNullKey(key), maskNullValue(oldValue), maskNullValue(newValue));
    }

    @Override
    public V replace(K key, V value) {
        Object prev = internalMap.replace(maskNullKey(key), maskNullValue(value));
        return unmaskNullValue(prev);
    }

    /**
     * {@inheritDoc}
     * <p>
     * An existing mapping to null (NULL_VALUE sentinel) is treated as absent, so the
     * mapping function is invoked in that case as well.
     */
    @Override
    public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
        Object maskedKey = maskNullKey(key);
        // Consistency fix: reuse the already-masked key instead of masking again.
        Object currentValue = internalMap.get(maskedKey);
        if (currentValue != null && currentValue != NullSentinel.NULL_VALUE) {
            // The key exists with a non-null value, so we don't compute.
            return unmaskNullValue(currentValue);
        }
        // The key doesn't exist or is mapped to null, so we should compute.
        V newValue = mappingFunction.apply(unmaskNullKey(maskedKey));
        if (newValue != null) {
            Object result = internalMap.compute(maskedKey, (k, v) -> {
                if (v != null && v != NullSentinel.NULL_VALUE) {
                    return v; // Another thread set a non-null value, so we keep it.
                }
                return maskNullValue(newValue);
            });
            return unmaskNullValue(result);
        } else {
            // If the computed value is null, ensure no mapping exists.
            internalMap.remove(maskedKey);
            return null;
        }
    }

    @Override
    public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        Object maskedKey = maskNullKey(key);
        Object result = internalMap.compute(maskedKey, (k, v) -> {
            V oldValue = unmaskNullValue(v);
            V newValue = remappingFunction.apply(unmaskNullKey(k), oldValue);
            // Returning null from the internal compute removes the mapping, per contract.
            return (newValue == null) ? null : maskNullValue(newValue);
        });

        return unmaskNullValue(result);
    }

    @Override
    public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
        Objects.requireNonNull(remappingFunction);
        Objects.requireNonNull(value); // Matches ConcurrentMap.merge's non-null value contract.
        Object maskedKey = maskNullKey(key);
        Object result = internalMap.merge(maskedKey, maskNullValue(value), (v1, v2) -> {
            V unmaskV1 = unmaskNullValue(v1);
            V unmaskV2 = unmaskNullValue(v2);
            V newValue = remappingFunction.apply(unmaskV1, unmaskV2);
            return (newValue == null) ? null : maskNullValue(newValue);
        });

        return unmaskNullValue(result);
    }

    /** Live view of values; unmasks sentinels on iteration. */
    @SuppressWarnings({"NullableProblems"})
    @Override
    public Collection<V> values() {
        Collection<Object> internalValues = internalMap.values();
        return new AbstractCollection<V>() {
            @SuppressWarnings("NullableProblems")
            @Override
            public Iterator<V> iterator() {
                Iterator<Object> it = internalValues.iterator();
                return new Iterator<V>() {
                    @Override
                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    @Override
                    public V next() {
                        return unmaskNullValue(it.next());
                    }

                    @Override
                    public void remove() {
                        it.remove();
                    }
                };
            }

            @Override
            public int size() {
                return internalValues.size();
            }

            @SuppressWarnings("unchecked")
            @Override
            public boolean contains(Object o) {
                return internalMap.containsValue(maskNullValue((V) o));
            }

            @Override
            public void clear() {
                internalMap.clear();
            }
        };
    }

    /** Live view of keys; unmasks sentinels on iteration. */
    @SuppressWarnings("NullableProblems")
    @Override
    public Set<K> keySet() {
        Set<Object> internalKeys = internalMap.keySet();
        return new AbstractSet<K>() {
            @SuppressWarnings("NullableProblems")
            @Override
            public Iterator<K> iterator() {
                Iterator<Object> it = internalKeys.iterator();
                return new Iterator<K>() {
                    @Override
                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    @Override
                    public K next() {
                        return unmaskNullKey(it.next());
                    }

                    @Override
                    public void remove() {
                        it.remove();
                    }
                };
            }

            @Override
            public int size() {
                return internalKeys.size();
            }

            @SuppressWarnings("unchecked")
            @Override
            public boolean contains(Object o) {
                return internalMap.containsKey(maskNullKey((K) o));
            }

            @SuppressWarnings("unchecked")
            @Override
            public boolean remove(Object o) {
                return internalMap.remove(maskNullKey((K) o)) != null;
            }

            @Override
            public void clear() {
                internalMap.clear();
            }
        };
    }

    /** Live view of entries; entries unmask sentinels and support setValue. */
    @SuppressWarnings("NullableProblems")
    @Override
    public Set<Entry<K, V>> entrySet() {
        Set<Entry<Object, Object>> internalEntries = internalMap.entrySet();
        return new AbstractSet<Entry<K, V>>() {
            @SuppressWarnings("NullableProblems")
            @Override
            public Iterator<Entry<K, V>> iterator() {
                Iterator<Entry<Object, Object>> it = internalEntries.iterator();
                return new Iterator<Entry<K, V>>() {
                    @Override
                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    @Override
                    public Entry<K, V> next() {
                        Entry<Object, Object> internalEntry = it.next();
                        return new Entry<K, V>() {
                            @Override
                            public K getKey() {
                                return unmaskNullKey(internalEntry.getKey());
                            }

                            @Override
                            public V getValue() {
                                return unmaskNullValue(internalEntry.getValue());
                            }

                            @Override
                            public V setValue(V value) {
                                Object oldValue = internalEntry.setValue(maskNullValue(value));
                                return unmaskNullValue(oldValue);
                            }

                            @Override
                            public boolean equals(Object o) {
                                if (!(o instanceof Entry)) return false;
                                Entry<?, ?> e = (Entry<?, ?>) o;
                                return Objects.equals(getKey(), e.getKey()) &&
                                        Objects.equals(getValue(), e.getValue());
                            }

                            @Override
                            public int hashCode() {
                                // Map.Entry contract: key hash XOR value hash.
                                return Objects.hashCode(getKey()) ^ Objects.hashCode(getValue());
                            }

                            @Override
                            public String toString() {
                                return getKey() + "=" + getValue();
                            }
                        };
                    }

                    @Override
                    public void remove() {
                        it.remove();
                    }
                };
            }

            @Override
            public int size() {
                return internalEntries.size();
            }

            @SuppressWarnings("unchecked")
            @Override
            public boolean contains(Object o) {
                if (!(o instanceof Entry)) return false;
                Entry<?, ?> e = (Entry<?, ?>) o;
                Object val = internalMap.get(maskNullKey((K) e.getKey()));
                return maskNullValue((V) e.getValue()).equals(val);
            }

            @SuppressWarnings("unchecked")
            @Override
            public boolean remove(Object o) {
                if (!(o instanceof Entry)) return false;
                Entry<?, ?> e = (Entry<?, ?>) o;
                return internalMap.remove(maskNullKey((K) e.getKey()), maskNullValue((V) e.getValue()));
            }

            @Override
            public void clear() {
                internalMap.clear();
            }
        };
    }

    /**
     * Overrides the equals method to ensure proper comparison between two maps.
     * Two maps are considered equal if they contain the same key-value mappings.
     *
     * @param o the object to be compared for equality with this map
     * @return true if the specified object is equal to this map
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Map)) return false;
        Map<?, ?> other = (Map<?, ?>) o;
        if (this.size() != other.size()) return false;
        for (Entry<K, V> entry : this.entrySet()) {
            K key = entry.getKey();
            V value = entry.getValue();
            if (!other.containsKey(key)) return false;
            Object otherValue = other.get(key);
            if (!Objects.equals(value, otherValue)) return false;
        }
        return true;
    }

    /**
     * Overrides the hashCode method to ensure consistency with equals.
     * The hash code of a map is defined to be the sum of the hash codes of each entry in the map.
     *
     * @return the hash code value for this map
     */
    @Override
    public int hashCode() {
        int h = 0;
        for (Entry<K, V> entry : this.entrySet()) {
            K key = entry.getKey();
            V value = entry.getValue();
            int keyHash = (key == null) ? 0 : key.hashCode();
            int valueHash = (value == null) ? 0 : value.hashCode();
            h += keyHash ^ valueHash;
        }
        return h;
    }

    /**
     * Overrides the toString method to provide a string representation of the map.
     * The string representation consists of a list of key-value mappings in the order returned
     * by the map's entrySet view's iterator, enclosed in braces ("{}"). Adjacent mappings are
     * separated by the characters ", " (comma and space).
     *
     * @return a string representation of this map
     */
    @Override
    public String toString() {
        Iterator<Entry<K, V>> it = this.entrySet().iterator();
        if (!it.hasNext())
            return "{}";

        StringBuilder sb = new StringBuilder();
        sb.append('{');
        for (; ; ) {
            Entry<K, V> e = it.next();
            K key = e.getKey();
            V value = e.getValue();
            sb.append(key == this ? "(this Map)" : key);
            sb.append('=');
            sb.append(value == this ? "(this Map)" : value);
            if (!it.hasNext())
                return sb.append('}').toString();
            sb.append(',').append(' ');
        }
    }
}
package org.unify4j.model.base;

import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

/**
 * ConcurrentHashMapNullSafe is a thread-safe implementation of ConcurrentMap
 * that allows null keys and null values by using sentinel objects internally
 * (see {@link AbstractConcurrentMapNullSafe}).
 *
 * @param <K> The type of keys maintained by this map
 * @param <V> The type of mapped values
 */
public class ConcurrentHashMapNullSafe<K, V> extends AbstractConcurrentMapNullSafe<K, V> {
    /**
     * Constructs a new, empty ConcurrentHashMapNullSafe with default initial capacity (16)
     * and load factor (0.75).
     */
    public ConcurrentHashMapNullSafe() {
        super(new ConcurrentHashMap<>());
    }

    /**
     * Constructs a new, empty ConcurrentHashMapNullSafe with the specified initial capacity
     * and default load factor (0.75).
     *
     * @param initialCapacity the initial capacity. The implementation performs internal sizing
     *                        to accommodate this many elements.
     * @throws IllegalArgumentException if the initial capacity is negative.
     */
    public ConcurrentHashMapNullSafe(int initialCapacity) {
        super(new ConcurrentHashMap<>(initialCapacity));
    }

    /**
     * Constructs a new, empty ConcurrentHashMapNullSafe with the specified initial capacity
     * and load factor.
     *
     * @param initialCapacity the initial capacity. The implementation
     *                        performs internal sizing to accommodate this many elements.
     * @param loadFactor      the load factor threshold, used to control resizing.
     *                        Resizing may be performed when the average number of elements per
     *                        bin exceeds this threshold.
     * @throws IllegalArgumentException if the initial capacity is negative or the load factor is non-positive
     */
    public ConcurrentHashMapNullSafe(int initialCapacity, float loadFactor) {
        super(new ConcurrentHashMap<>(initialCapacity, loadFactor));
    }

    /**
     * Constructs a new ConcurrentHashMapNullSafe with the same mappings as the specified map.
     * The backing map is presized for the source map to avoid rehashing during the copy.
     *
     * @param m the map whose mappings are to be placed in this map
     * @throws NullPointerException if the specified map is null
     */
    public ConcurrentHashMapNullSafe(Map<? extends K, ? extends V> m) {
        // Presize for m.size() entries at the default 0.75 load factor (min capacity 16).
        super(new ConcurrentHashMap<>(Math.max(16, (int) (Objects.requireNonNull(m, "m").size() / 0.75f) + 1)));
        putAll(m);
    }
}